MRUnit을 이용한 Unit Test
WordCountTest
package com.fastcampus.hadoop;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.apache.hadoop.mrunit.types.Pair;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
/**
 * Unit tests for {@code WordCount} (mapper, reducer, and the full
 * map-reduce pipeline) using MRUnit drivers.
 *
 * <p>Each test comes in two flavors: a {@code runTest()} variant that
 * asserts the exact expected output, and a {@code run()} variant that
 * simply executes the stage and prints the result for inspection.
 */
public class WordCountTest {
    // Drivers are recreated before every test by setUp(), so each test
    // starts from a clean state.
    MapDriver<Object, Text, Text, IntWritable> mapDriver;
    ReduceDriver<Text, IntWritable, Text, IntWritable> reduceDriver;
    MapReduceDriver<Object, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver;

    @Before
    public void setUp() {
        mapDriver = new MapDriver<>(new WordCount.TokenizerMapper());
        reduceDriver = new ReduceDriver<>(new WordCount.IntSumReducer());
        mapReduceDriver = new MapReduceDriver<>(new WordCount.TokenizerMapper(), new WordCount.IntSumReducer());
    }

    /**
     * Mapper emits (token, 1) for every token, in input order — duplicates
     * are NOT aggregated at the map stage.
     */
    @Test
    public void wordCountMapTest() throws IOException {
        new MapDriver<Object, Text, Text, IntWritable>()
                .withMapper(new WordCount.TokenizerMapper())
                .withInput(new LongWritable(0L), new Text("dog dog cat cat owl cat"))
                .withOutput(new Text("dog"), new IntWritable(1))
                .withOutput(new Text("dog"), new IntWritable(1))
                .withOutput(new Text("cat"), new IntWritable(1))
                .withOutput(new Text("cat"), new IntWritable(1))
                .withOutput(new Text("owl"), new IntWritable(1))
                .withOutput(new Text("cat"), new IntWritable(1))
                .runTest();
    }

    /** Same map stage, but collects and prints the raw output pairs. */
    @Test
    public void wordCountMapTest2() throws IOException {
        List<Pair<Text, IntWritable>> result = mapDriver.withInput(new LongWritable(0L), new Text("dog dog cat cat owl cat"))
                .run();
        System.out.println(result);
    }

    /** Reducer sums the 1-counts per key. */
    @Test
    public void wordCountReduceTest() throws IOException {
        new ReduceDriver<Text, IntWritable, Text, IntWritable>()
                .withReducer(new WordCount.IntSumReducer())
                .withInput(new Text("cat"), Arrays.asList(new IntWritable(1), new IntWritable(1), new IntWritable(1)))
                .withInput(new Text("dog"), Arrays.asList(new IntWritable(1), new IntWritable(1)))
                .withInput(new Text("owl"), Arrays.asList(new IntWritable(1)))
                .withOutput(new Text("cat"), new IntWritable(3))
                .withOutput(new Text("dog"), new IntWritable(2))
                .withOutput(new Text("owl"), new IntWritable(1))
                .runTest();
    }

    /** Same reduce stage, printing the aggregated pairs instead of asserting. */
    @Test
    public void wordCountReduceTest2() throws IOException {
        List<Pair<Text, IntWritable>> result = reduceDriver
                .withInput(new Text("cat"), Arrays.asList(new IntWritable(1), new IntWritable(1), new IntWritable(1)))
                .withInput(new Text("dog"), Arrays.asList(new IntWritable(1), new IntWritable(1)))
                .withInput(new Text("owl"), Arrays.asList(new IntWritable(1)))
                .run();
        System.out.println(result);
    }

    /**
     * Full pipeline: map + shuffle/sort + reduce. Expected output is sorted
     * by key (cat, dog, owl) because MRUnit sorts between map and reduce.
     */
    @Test
    public void wordCountTest() throws IOException {
        mapReduceDriver
                .withInput(new LongWritable(0L), new Text("dog dog cat cat owl cat"))
                .withOutput(new Text("cat"), new IntWritable(3))
                .withOutput(new Text("dog"), new IntWritable(2))
                .withOutput(new Text("owl"), new IntWritable(1))
                .runTest();
    }

    /** Full pipeline, printing the result pairs instead of asserting. */
    @Test
    public void wordCountTest2() throws IOException {
        List<Pair<Text, IntWritable>> result = mapReduceDriver
                .withInput(new LongWritable(0L), new Text("dog dog cat cat owl cat"))
                .run();
        System.out.println(result);
    }

    /**
     * Runs the counter-instrumented mapper and prints both counters.
     * NOTE(review): no assertions here — consider asserting the expected
     * counter values once they are confirmed.
     */
    @Test
    public void wordCountWithCounter() throws IOException {
        MapDriver<Object, Text, Text, IntWritable> mapDriver1 = new MapDriver<>(new WordCountWithCounter.TokenizerMapper());
        mapDriver1
                .withInput(new LongWritable(0L), new Text("'hello' 'world fastcampus hadoop !!'"))
                .run();
        System.out.println(mapDriver1.getCounters().findCounter(WordCountWithCounter.Word.WITHOUT_SPECIAL_CHARACTER).getValue());
        System.out.println(mapDriver1.getCounters().findCounter(WordCountWithCounter.Word.WITH_SPECIAL_CHARACTER).getValue());
    }
}
Mockito를 이용한 Unit Test
WordCountTest with Mockito
package com.fastcampus.hadoop;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;
import com.fastcampus.hadoop.WordCount.TokenizerMapper;
import org.mockito.InOrder;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code WordCount} using Mockito instead of MRUnit:
 * the Hadoop {@code Context} objects are mocked, and interactions
 * (writes, counter increments) are verified directly.
 */
public class WordCountTestWithMockito {

    /**
     * Verifies the mapper emits (word, 1) per token, in input order.
     * The mapper's {@code word} Text is also mocked so the exact values
     * passed to {@code set()} can be verified (a real Text is mutable and
     * would only retain the last value).
     */
    @Test
    // Mapper.Context is generic; mock(Class) can only produce a raw instance.
    @SuppressWarnings("unchecked")
    public void wordCountMapTest() throws IOException, InterruptedException {
        TokenizerMapper mapper = new TokenizerMapper();
        Mapper.Context context = mock(Mapper.Context.class);
        // assumes TokenizerMapper.word is accessible from this package — confirm against WordCount
        mapper.word = mock(Text.class);

        mapper.map(new LongWritable(0), new Text("dog dog cat"), context);

        // Verify set()/write() happen alternately, once per token, in order.
        InOrder inOrder = inOrder(mapper.word, context);
        inOrder.verify(mapper.word).set(eq("dog"));
        inOrder.verify(context).write(eq(mapper.word), eq(new IntWritable(1)));
        inOrder.verify(mapper.word).set(eq("dog"));
        inOrder.verify(context).write(eq(mapper.word), eq(new IntWritable(1)));
        inOrder.verify(mapper.word).set(eq("cat"));
        inOrder.verify(context).write(eq(mapper.word), eq(new IntWritable(1)));
    }

    /** Verifies the reducer writes the sum of the values for a key. */
    @Test
    // Reducer.Context is generic; mock(Class) can only produce a raw instance.
    @SuppressWarnings("unchecked")
    public void wordCountReduceTest() throws IOException, InterruptedException {
        WordCount.IntSumReducer reducer = new WordCount.IntSumReducer();
        Reducer.Context context = mock(Reducer.Context.class);
        List<IntWritable> values = Arrays.asList(new IntWritable(1), new IntWritable(1));

        reducer.reduce(new Text("dog"), values, context);

        verify(context).write(new Text("dog"), new IntWritable(2));
    }

    /**
     * Verifies the WITHOUT_SPECIAL_CHARACTER counter is incremented once
     * per plain token ("dog dog cat" → 3 increments). The counter itself
     * is a mock returned by the stubbed context.
     */
    @Test
    // Mapper.Context is generic; mock(Class) can only produce a raw instance.
    @SuppressWarnings("unchecked")
    public void counterTest() throws IOException, InterruptedException {
        WordCountWithCounter.TokenizerMapper mapper = new WordCountWithCounter.TokenizerMapper();
        Mapper.Context context = mock(Mapper.Context.class);
        Counter counter = mock(Counter.class);
        when(context.getCounter(WordCountWithCounter.Word.WITHOUT_SPECIAL_CHARACTER)).thenReturn(counter);

        mapper.map(new LongWritable(0), new Text("dog dog cat"), context);

        verify(counter, times(3)).increment(1);
    }
}