Package: com.datasalt.pangool.tuplemr.mapred.lib.output

Usage examples of com.datasalt.pangool.tuplemr.mapred.lib.output.HadoopOutputFormat


    builder.setSpecificOrderBy("user", new OrderBy().add("money", Order.ASC));

    builder.addInput(new Path("test-input"), new HadoopInputFormat(TextInputFormat.class),
        new FirstInputProcessor());
    builder.setTupleReducer(new MyGroupHandler());
    builder.setOutput(new Path("test-output"), new HadoopOutputFormat(TextOutputFormat.class),
        NullWritable.class, NullWritable.class);

    Job job = builder.createJob();
    try {
      assertRun(job);
View Full Code Here


    builder.setGroupByFields("country", "age", "name");
    builder.setOrderBy(new OrderBy().add("country", Order.ASC).add("age", Order.ASC)
        .add("name", Order.ASC).add("height", Order.DESC));
    builder.setRollupFrom("country");
    builder.setTupleReducer(new IdentityRed());
    builder.setOutput(outputPath, new HadoopOutputFormat(SequenceFileOutputFormat.class), Text.class,
        Text.class);
    builder.addInput(new Path(input), new HadoopInputFormat(SequenceFileInputFormat.class), new Map());

    Job job = builder.createJob();
    try {
View Full Code Here

    builder.setGroupByFields("age", "name", "country");
    builder.setOrderBy(new OrderBy().add("country", Order.ASC).add("age", Order.ASC)
        .add("name", Order.ASC).add("height", Order.DESC));
    builder.setRollupFrom("age");
    builder.setTupleReducer(new IdentityRed());
    builder.setOutput(outputPath, new HadoopOutputFormat(SequenceFileOutputFormat.class), Text.class,
        Text.class);
    builder.addInput(new Path(input), new HadoopInputFormat(SequenceFileInputFormat.class), new Map());

    Job job = builder.createJob();
    try {
View Full Code Here

    builder.setGroupByFields("age", "name", "country");
    builder.setOrderBy(new OrderBy().add("country", Order.ASC).add("age", Order.ASC)
        .add("name", Order.ASC));
    builder.setRollupFrom("age");
    builder.setTupleReducer(new IdentityRed());
    builder.setOutput(outputPath, new HadoopOutputFormat(SequenceFileOutputFormat.class), Text.class,
        Text.class);
    builder.addInput(new Path(input), new HadoopInputFormat(SequenceFileInputFormat.class),
        new DoNothingMap());

    Job job = builder.createJob();
View Full Code Here

        assertEquals(4.6, ((ITuple)currentTuple.get("tuple2")).get("c"));
        assertEquals(false, ((ITuple)currentTuple.get("tuple2")).get("d"));
      };
    });
    builder.setGroupByFields("partitionId");
    builder.setOutput(new Path(OUTPUT), new HadoopOutputFormat(TextOutputFormat.class), Text.class, NullWritable.class);
    Job job = builder.createJob();
    try {
      job.waitForCompletion(true);
    } finally {
      builder.cleanUpInstanceFiles();
View Full Code Here

      protected void map(LongWritable key, Text value, Context context) throws IOException ,InterruptedException {
        value.set(value.toString() + "\t" + classify(value.toString()));
        context.write(value, NullWritable.get());
      }
    });
    job.setOutput(new Path(output), new HadoopOutputFormat(TextOutputFormat.class), Text.class, NullWritable.class);
    job.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class));
    job.createJob().waitForCompletion(true);
   
    return 1;
  }
View Full Code Here

    }
    // Set output path
    Path outReservoirPath = new Path(outputPath + "-reservoir");
    builder.setOutput(
        outReservoirPath,
        new HadoopOutputFormat(SequenceFileOutputFormat.class),
        Text.class,
        NullWritable.class);
    builder.setJarByClass(callingClass);

    try {
View Full Code Here

    delete(output);
   
    init(conf, new Path(modelFolder));
   
    MapOnlyJobBuilder job = new MapOnlyJobBuilder(conf);
    job.setOutput(new Path(output), new HadoopOutputFormat(TextOutputFormat.class), Text.class, NullWritable.class);
    job.addInput(new Path(input), new HadoopInputFormat(TextInputFormat.class), new MapOnlyMapper<LongWritable, Text, Text, NullWritable>() {
      protected void map(LongWritable key, Text value, Context context) throws IOException ,InterruptedException {
        value.set(value.toString() + "\t" + classify(value.toString()));
        context.write(value, NullWritable.get());
      }
View Full Code Here

TOP

Related Classes of com.datasalt.pangool.tuplemr.mapred.lib.output.HadoopOutputFormat

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware@gmail.com.