/**
 * End-to-end smoke test: submits a default (identity) MapReduce job over a
 * three-line input file and verifies the job completes successfully and that
 * the output partition echoes the input lines in order.
 *
 * @throws Exception on any Hadoop/IO failure; the test framework reports it.
 */
@Test
@TestHadoop
public void testHadoopMapReduce() throws Exception {
    JobConf conf = getHadoopConf();
    FileSystem fs = FileSystem.get(conf);
    JobClient jobClient = new JobClient(conf);
    try {
        Path inputDir = new Path(getHadoopTestDir(), "input");
        Path outputDir = new Path(getHadoopTestDir(), "output");
        fs.mkdirs(inputDir);
        // try-with-resources + explicit charset: the original leaked the writer
        // if a write failed and relied on the platform-default encoding.
        try (Writer writer =
                 new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")), "UTF-8")) {
            writer.write("a\n");
            writer.write("b\n");
            writer.write("c\n");
        }
        // Single map task, no retries: a task failure should fail the job
        // immediately rather than masking a problem behind re-attempts.
        JobConf jobConf = getHadoopConf();
        jobConf.setInt("mapred.map.tasks", 1);
        jobConf.setInt("mapred.map.max.attempts", 1);
        jobConf.setInt("mapred.reduce.max.attempts", 1);
        jobConf.set("mapred.input.dir", inputDir.toString());
        jobConf.set("mapred.output.dir", outputDir.toString());
        final RunningJob runningJob = jobClient.submitJob(jobConf);
        // Poll up to 60s for job completion before checking its status.
        waitFor(60 * 1000, true, new Predicate() {
            @Override
            public boolean evaluate() throws Exception {
                return runningJob.isComplete();
            }
        });
        Assert.assertTrue(runningJob.isSuccessful());
        Assert.assertTrue(fs.exists(new Path(outputDir, "part-00000")));
        // Default job output is "key\tvalue"; endsWith() ignores the key/tab
        // prefix. assertNotNull turns a truncated file into a clear assertion
        // failure instead of an NPE from trim().
        try (BufferedReader reader =
                 new BufferedReader(
                     new InputStreamReader(fs.open(new Path(outputDir, "part-00000")), "UTF-8"))) {
            String line = reader.readLine();
            Assert.assertNotNull(line);
            Assert.assertTrue(line.trim().endsWith("a"));
            line = reader.readLine();
            Assert.assertNotNull(line);
            Assert.assertTrue(line.trim().endsWith("b"));
            line = reader.readLine();
            Assert.assertNotNull(line);
            Assert.assertTrue(line.trim().endsWith("c"));
            Assert.assertNull(reader.readLine());
        }
    }
    finally {
        // Nested finally so jobClient is closed even if fs.close() throws.
        try {
            fs.close();
        }
        finally {
            jobClient.close();
        }
    }
}