Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.IntWritable
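IntWritable is Hadoop's mutable, boxed int. It implements WritableComparable&lt;IntWritable&gt;, so it serializes to a fixed four bytes and can serve as a MapReduce key or value. Before the driver-based examples below, here is a minimal sketch of the core API (the class name IntWritableBasics is only for illustration):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;

    public class IntWritableBasics {
      public static void main(String[] args) throws IOException {
        // IntWritable is mutable: one instance can be reused across records
        IntWritable a = new IntWritable(42);
        a.set(7);
        System.out.println(a.get());                         // 7

        // WritableComparable: keys sort by their int value
        System.out.println(a.compareTo(new IntWritable(9))); // negative

        // raw Writable round trip
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        a.write(new DataOutputStream(bytes));

        IntWritable b = new IntWritable();
        b.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(b.get());                         // 7
      }
    }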


    // wire the mapper/reducer under test (defined elsewhere in the test class) into MRUnit drivers
    driver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
    mapDriver = MapDriver.newMapDriver(mapper);
    reduceDriver = ReduceDriver.newReduceDriver(reducer);

    // expected word counts; ONE and TWO are int constants defined in the test class
    expectedOutput = new ArrayList<Pair<Text, IntWritable>>();
    expectedOutput.add(new Pair<Text, IntWritable>(new Text("Bye"), new IntWritable(ONE)));
    expectedOutput.add(new Pair<Text, IntWritable>(new Text("Goodbye"), new IntWritable(ONE)));
    expectedOutput.add(new Pair<Text, IntWritable>(new Text("Hadoop"), new IntWritable(TWO)));
    expectedOutput.add(new Pair<Text, IntWritable>(new Text("Hello"), new IntWritable(TWO)));
    expectedOutput.add(new Pair<Text, IntWritable>(new Text("World"), new IntWritable(TWO)));
  }
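The mapper and reducer fields wired into the drivers above are not part of this excerpt. A WordCount-style mapper that would feed these drivers, sketched as an assumption (the class and field names here are hypothetical):

    import java.io.IOException;
    import java.util.StringTokenizer;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // hypothetical stand-in for the mapper under test
    public class TokenMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
      private static final IntWritable ONE = new IntWritable(1);
      private final Text word = new Text();

      @Override
      protected void map(LongWritable key, Text value, Context context)
          throws IOException, InterruptedException {
        StringTokenizer tokens = new StringTokenizer(value.toString());
        while (tokens.hasMoreTokens()) {
          word.set(tokens.nextToken());
          context.write(word, ONE); // emit (word, 1) per token
        }
      }
    }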


    // map inputs: (byte offset, line) pairs for two test files
    final List<Pair<LongWritable, Text>> inputs = new ArrayList<Pair<LongWritable, Text>>();
    inputs.add(new Pair<LongWritable, Text>(new LongWritable(21), new Text(FILE01)));
    inputs.add(new Pair<LongWritable, Text>(new LongWritable(48), new Text(FILE02)));

    // expected map outputs: one (word, 1) pair per token, in emission order
    final List<Pair<Text, IntWritable>> outputs = new ArrayList<Pair<Text, IntWritable>>();
    outputs.add(new Pair<Text, IntWritable>(new Text("Hello"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("World"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Bye"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("World"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Hello"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Hadoop"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Goodbye"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Hadoop"), new IntWritable(ONE)));

    mapDriver.withAll(inputs).withAllOutput(outputs).runTest(true);
  }
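Note that runTest(true) makes output order significant: the test fails unless the mapper emits exactly these pairs in this order. Calling runTest(false) would accept the same pairs in any order.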

  @Test
  public void testReduceDriver() throws IOException {
    // per-key value lists: one occurrence for "Bye"/"Goodbye", two for "Hadoop"
    final List<IntWritable> input1 = new ArrayList<IntWritable>();
    input1.add(new IntWritable(ONE));
   
    final List<IntWritable> input2 = new ArrayList<IntWritable>();
    input2.add(new IntWritable(ONE));
    input2.add(new IntWritable(ONE));
   
    final List<Pair<Text, List<IntWritable>>> inputs = new ArrayList<Pair<Text, List<IntWritable>>>();
    inputs.add(new Pair<Text, List<IntWritable>>(new Text("Bye"), input1));
    inputs.add(new Pair<Text, List<IntWritable>>(new Text("Goodbye"), input1));
    inputs.add(new Pair<Text, List<IntWritable>>(new Text("Hadoop"), input2));
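The excerpt stops after building the inputs. Assuming a summing reducer like the one in the next snippet, a plausible completion of this test would be:

    // hypothetical completion: expected sums for the three keys above
    final List<Pair<Text, IntWritable>> outputs = new ArrayList<Pair<Text, IntWritable>>();
    outputs.add(new Pair<Text, IntWritable>(new Text("Bye"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Goodbye"), new IntWritable(ONE)));
    outputs.add(new Pair<Text, IntWritable>(new Text("Hadoop"), new IntWritable(TWO)));

    reduceDriver.withAll(inputs).withAllOutput(outputs).runTest(true);
  }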

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      int sum = 0; // accumulate the counts emitted for this key
      for (IntWritable val : values) {
        sum += val.get();
      }

      context.write(key, new IntWritable(sum));
    }

  /**
   * @param context
   *          the Hadoop job Map context
   */
  @Override
  protected void cleanup(Context context) throws IOException,
      InterruptedException {
    context.write(KEY, new IntWritable(this.someState));
  }
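The cleanup fragment above emits accumulated task-level state once, at the end of the task, rather than once per record (in-mapper combining). A self-contained sketch of that pattern; the class name, KEY, and someState are stand-ins for the fields elided from the excerpt:

    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // hypothetical mapper illustrating accumulate-then-emit-in-cleanup
    public class CountingMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
      private static final Text KEY = new Text("records");
      private int someState = 0;

      @Override
      protected void map(LongWritable key, Text value, Context context) {
        someState++; // accumulate across all input records
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
        // emit the aggregate exactly once per task
        context.write(KEY, new IntWritable(someState));
      }
    }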

    final MapReduceDriver<IntWritable, Integer, Integer, IntWritable, Integer, IntWritable> driver = MapReduceDriver
        .newMapReduceDriver(new InverseMapper<IntWritable, Integer>(),
            new IntSumReducer<Integer>()).withConfiguration(conf);
    driver
        .setKeyGroupingComparator(org.apache.hadoop.mrunit.TestMapReduceDriver.INTEGER_COMPARATOR);
    driver.setKeyOrderComparator(new JavaSerializationComparator<Integer>());
    driver.withInput(new IntWritable(1), 2).withInput(new IntWritable(2), 3);
    driver.withOutput(2, new IntWritable(1)).withOutput(3, new IntWritable(2))
        .runTest();
  }
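Here the Integer keys are handled by Java serialization rather than the Writable machinery: JavaSerializationComparator deserializes each key and compares it through Comparable, so the key class must implement both Serializable and Comparable, and JavaSerialization must be registered in io.serializations, as the next snippet shows.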

    final PipelineMapReduceDriver<Integer, IntWritable, Integer, IntWritable> driver = PipelineMapReduceDriver
        .newPipelineMapReduceDriver();
    driver.addMapReduce(new IdentityMapper<Integer, IntWritable>(),
        new IdentityReducer<Integer, IntWritable>());
    driver.addMapReduce(new IdentityMapper<Integer, IntWritable>(),
        new IdentityReducer<Integer, IntWritable>());
    driver.withConfiguration(conf);

    driver.withInput(1, new IntWritable(2)).withInput(2, new IntWritable(3));
    driver.withOutput(1, new IntWritable(2)).withOutput(2, new IntWritable(3));
    driver.runTest();
  }

    final Configuration conf = new Configuration();
    // register JavaSerialization alongside the default Writable serialization
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        "org.apache.hadoop.io.serializer.JavaSerialization");
    final MapDriver<Integer, IntWritable, Integer, IntWritable> driver = MapDriver
        .newMapDriver(new IdentityMapper<Integer, IntWritable>())
        .withConfiguration(conf);
    driver.setInput(1, new IntWritable(2));
    driver.addOutput(1, new IntWritable(2));
    driver.runTest();
  }

    final MapReduceDriver<Integer, IntWritable, Integer, IntWritable, Integer, IntWritable> driver = MapReduceDriver
        .newMapReduceDriver(new IdentityMapper<Integer, IntWritable>(),
            new IdentityReducer<Integer, IntWritable>())
        .withConfiguration(conf);
    driver.withKeyOrderComparator(new JavaSerializationComparator<Integer>());
    driver.withKeyGroupingComparator(INTEGER_COMPARATOR);
    driver.withInput(1, new IntWritable(2)).withInput(2, new IntWritable(3));
    driver.withOutput(1, new IntWritable(2)).withOutput(2, new IntWritable(3));
    driver.runTest();
  }

    FileSystem fs = outputPath.getFileSystem(conf);
   
    // Mahout DF helper: list the sequence files under the job output path
    Path[] outfiles = DFUtils.listOutputFiles(fs, outputPath);
   
    // read back the (IntWritable, MapredOutput) pairs; the instances are reused per record
    IntWritable key = new IntWritable();
    MapredOutput value = new MapredOutput();

    for (Path path : outfiles) {
      SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
     
      try {
        while (reader.next(key, value)) {
          output.put(key.get(), value.clone());
        }
      } finally {
        reader.close();
      }
    }
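For reference, the writing side of such a file. A minimal sketch using the classic SequenceFile.createWriter overload; the output path is hypothetical, and plain Text stands in for Mahout's MapredOutput value type:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class WriteIntKeys {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path("ints.seq"); // hypothetical output path
        FileSystem fs = path.getFileSystem(conf);

        SequenceFile.Writer writer =
            SequenceFile.createWriter(fs, conf, path, IntWritable.class, Text.class);
        try {
          IntWritable key = new IntWritable();
          Text value = new Text();
          for (int i = 0; i < 3; i++) {
            key.set(i);
            value.set("value-" + i);
            writer.append(key, value); // one (IntWritable, Text) record per call
          }
        } finally {
          writer.close();
        }
      }
    }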
