try {
  Job job = mr.createJob();
  job.waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 0;
}
mr.setTupleCombiner(new CountReducer());

try {
  mr.createJob().waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 1;
}

public static void main(String[] args) throws Exception {
mr.addInput(new Path(input2), new HadoopInputFormat(TextInputFormat.class), new UrlProcessor());

try {
  mr.createJob().waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 1;
}
try {
  Job hadoopJob = job.createJob();
  hadoopJob.waitForCompletion(true);
} finally {
  job.cleanUpInstanceFiles();
}
return 0;
}

public MultiShakespeareIndexer() {
    NO_QUOTE_CHARACTER, NO_ESCAPE_CHARACTER), ITuple.class, NullWritable.class);

try {
  mr.createJob().waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 1;
}
builder.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class), new IProcessor());

try {
  builder.createJob().waitForCompletion(true);
} finally {
  builder.cleanUpInstanceFiles();
}
return 1;
}
    new UserActivityProcessor());

try {
  mr.createJob().waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 1;
}

public UserActivityNormalizer() {
    NullWritable.class);
mr.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class), new TweetsProcessor());

try {
  mr.createJob().waitForCompletion(true);
} finally {
  mr.cleanUpInstanceFiles();
}
return 0;
}
job.setOutput(new Path(output), new HadoopOutputFormat(TextOutputFormat.class), Text.class, NullWritable.class);

try {
  job.createJob().waitForCompletion(true);
} finally {
  job.cleanUpInstanceFiles();
}
delete(input);
return 0;
}
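
Every snippet above follows the same submit-and-clean-up idiom: configure the job through a Pangool builder, submit it with createJob().waitForCompletion(true), and call cleanUpInstanceFiles() in a finally block so the serialized instance files are removed even when the job fails. Below is a minimal word-count-style sketch of that idiom, not taken from the original sources: the driver class, job name, schema fields, and TokenizeMapper are illustrative assumptions, and CountReducer is assumed to be the reducer class referenced in the snippets.

// Sketch only. Assumes a TokenizeMapper (hypothetical) and the CountReducer
// referenced above are defined elsewhere on the classpath.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
// Pangool imports (package paths as in the com.datasalt.pangool layout;
// verify against the Pangool version you use):
import com.datasalt.pangool.io.Fields;
import com.datasalt.pangool.io.Schema;
import com.datasalt.pangool.tuplemr.TupleMRBuilder;
import com.datasalt.pangool.tuplemr.mapred.lib.input.HadoopInputFormat;
import com.datasalt.pangool.tuplemr.mapred.lib.output.HadoopOutputFormat;

public class WordCountDriver {

  public int run(String input, String output, Configuration conf) throws Exception {
    TupleMRBuilder mr = new TupleMRBuilder(conf, "Word Count");
    // Illustrative intermediate schema; each real example defines its own.
    mr.addIntermediateSchema(new Schema("schema", Fields.parse("word:string, count:int")));
    mr.setGroupByFields("word");
    mr.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class),
        new TokenizeMapper());                  // hypothetical mapper
    mr.setTupleReducer(new CountReducer());     // reducer class named in the snippets
    mr.setTupleCombiner(new CountReducer());    // same class reused as combiner, as above
    mr.setOutput(new Path(output), new HadoopOutputFormat(TextOutputFormat.class),
        Text.class, NullWritable.class);
    try {
      // Submit the Hadoop job and block until it completes.
      mr.createJob().waitForCompletion(true);
    } finally {
      // Always delete the instance files Pangool serialized for this job,
      // even if submission or execution failed.
      mr.cleanUpInstanceFiles();
    }
    return 0;
  }
}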