180 181 182 183 184 185 186 187 188 189 190
NullWritable.class); mr.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class), new TweetsProcessor()); try { mr.createJob().waitForCompletion(true); } finally { mr.cleanUpInstanceFiles(); } return 0; }
120 121 122 123 124 125 126 127 128 129 130
try { Job hadoopJob = job.createJob(); hadoopJob.waitForCompletion(true); } finally { job.cleanUpInstanceFiles(); } return 0; } public MultiShakespeareIndexer() {
149 150 151 152 153 154 155 156 157 158 159
job.setOutput(new Path(output), new HadoopOutputFormat(TextOutputFormat.class), Text.class, NullWritable.class); try { job.createJob().waitForCompletion(true); } finally { job.cleanUpInstanceFiles(); } delete(input); return 0; }
132 133 134 135 136 137 138 139 140 141 142
DoubleWritable.class); try { mr.createJob().waitForCompletion(true); } finally { mr.cleanUpInstanceFiles(); } return 1; } public static void main(String[] args) throws Exception {
118 119 120 121 122 123 124 125 126 127 128
builder.setTupleReducer(new TopNWords(n)); try { builder.createJob().waitForCompletion(true); } finally { builder.cleanUpInstanceFiles(); } return 1; }
199 200 201 202 203 204 205 206 207 208 209
try { Job job = mr.createJob(); job.waitForCompletion(true); } finally { mr.cleanUpInstanceFiles(); } return 0; }
90 91 92 93 94 95 96 97 98 99 100
cg.setTupleCombiner(new CountReducer()); try { cg.createJob().waitForCompletion(true); } finally { cg.cleanUpInstanceFiles(); } return 1; }
103 104 105 106 107 108 109 110 111 112 113
try { if(job.createJob().waitForCompletion(true)) { return 1; } } finally { job.cleanUpInstanceFiles(); } return -1; } public static void main(String[] args) throws Exception {
135 136 137 138 139 140 141 142 143 144 145
mr.setTupleCombiner(new CountReducer()); try { mr.createJob().waitForCompletion(true); } finally { mr.cleanUpInstanceFiles(); } return 1; }
152 153 154 155 156 157 158 159 160 161 162
mr.addInput(new Path(input2), new HadoopInputFormat(TextInputFormat.class), new UrlProcessor()); try { mr.createJob().waitForCompletion(true); } finally { mr.cleanUpInstanceFiles(); } return 1; }