Examples of IntWritable

Examples of org.apache.hadoop.io.IntWritable

  @Test(expected = IOException.class)
  public final void testMapperForNullValues() throws IOException, InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    ScoreMapper mapper = new ScoreMapper();
    mapper.map(null, new IntWritable(1), context);
  }
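The test above checks that ScoreMapper rejects a null key with an IOException. The actual ScoreMapper is not shown on this page; a minimal sketch of the guard the test implies, assuming Text/IntWritable input types, could look like this:

  import java.io.IOException;
  import org.apache.hadoop.io.IntWritable;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapreduce.Mapper;

  // Hypothetical sketch only; the real ScoreMapper's types and logic may differ.
  public class ScoreMapperSketch extends Mapper<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void map(Text key, IntWritable value, Context context)
        throws IOException, InterruptedException {
      if (key == null) {
        // The behavior testMapperForNullValues expects.
        throw new IOException("key must not be null");
      }
      context.write(key, value);
    }
  }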

Examples of org.apache.hadoop.io.IntWritable

  public void testScoreJobForValidValues() throws Exception {
    ValuePair valuePair = new ValuePair();
    valuePair.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
    valuePair.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
    HashMap<ValuePair, IntWritable> inputData1 = new HashMap<ValuePair, IntWritable>();
    inputData1.put(valuePair, new IntWritable(1));
    createSequenceFileInHdfs(inputData1, "outputOfNGramJob", "part-r-00000");
   
    HashMap<ValuePair, IntWritable> inputData2 = new HashMap<ValuePair, IntWritable>();
    inputData2.put(valuePair, new IntWritable(1));
    createSequenceFileInHdfs(inputData2, "outputOfNGramJob", "part-r-00001");
   
   
    String[] args = new String[] {};
    ScoreJob job = runScoreJob(args);
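Both inputs above are written with the project's createSequenceFileInHdfs test helper, whose implementation is not shown here. A plausible sketch, assuming it appends each map entry as a key/value record to a SequenceFile at the given directory and file name:

  // Plausible sketch of the helper; the project's actual signature may differ.
  private void createSequenceFileInHdfs(Map<? extends Writable, ? extends Writable> data,
      String dirName, String fileName) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path(dirName, fileName);
    Map.Entry<? extends Writable, ? extends Writable> first =
        data.entrySet().iterator().next();
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
        first.getKey().getClass(), first.getValue().getClass());
    try {
      for (Map.Entry<? extends Writable, ? extends Writable> entry : data.entrySet()) {
        writer.append(entry.getKey(), entry.getValue());
      }
    } finally {
      writer.close();
    }
  }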

Examples of org.apache.hadoop.io.IntWritable

    NGramReducer reducer = new NGramReducer();
    reducer.reduce(new Text("This is"), values, context);
    ValuePair valuePair = new ValuePair();
    valuePair.setValue1(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
    valuePair.setValue2(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
    verify(context).write(valuePair, new IntWritable(1));   
  }
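The excerpt above omits the setup for context and values. A hedged reconstruction, based on what the verify() call expects the reducer to pair up:

  // Reconstructed setup (not shown in the excerpt); the real test may differ.
  Reducer.Context context = mock(Reducer.Context.class);
  List<Text> valueList = new ArrayList<Text>();
  valueList.add(new Text("This is a bookdelimiterBetweenKeyAndValuevalue1"));
  valueList.add(new Text("This is not a bookdelimiterBetweenKeyAndValuevalue2"));
  Iterable<Text> values = valueList;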

Examples of org.apache.hadoop.io.IntWritable

        ValuePair valuePair = new ValuePair();
        valuePair.setValue1(value.get(i));
        valuePair.setValue2(value.get(j));
        logger.info("Value set in ValuePair is: " + value.get(i) + ", "
            + value.get(j));
        context.write(valuePair, new IntWritable(1));
      }

    }
  }
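One note on the write above: allocating a fresh IntWritable(1) per record works, but the idiomatic Hadoop pattern (as in the stock WordCount example) reuses a single instance, since the framework serializes the value during context.write:

  // Idiomatic variant: declare one shared count instance on the class...
  private static final IntWritable ONE = new IntWritable(1);

  // ...and reuse it for every write; the framework copies the bytes out
  // during context.write, so sharing the instance is safe here.
  context.write(valuePair, ONE);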

Examples of org.apache.hadoop.io.IntWritable

    return student;
  }
 
  @Test
  public void testMergeByCustomObjectKeyWithSequenceFileInputFormat() throws Exception {   
    Student student1 = setStudent(new Text("Sam"), new Text("US"), new IntWritable(1),
        new LongWritable(9999999998L), new DoubleWritable(99.12));
    Student student2 = setStudent(new Text("John"), new Text("AUS"), new IntWritable(2),
        new LongWritable(9999999999L), new DoubleWritable(90.12));
    Student student3 = setStudent(new Text("Mary"), new Text("UK"), new IntWritable(3),
        new LongWritable(9999999988L), new DoubleWritable(69.12));
    Student student4 = setStudent(new Text("Kelvin"), new Text("UK"), new IntWritable(4),
        new LongWritable(9999998888L), new DoubleWritable(59.12));
 
    HashMap<Student, Text> inputData1 = new HashMap<Student, Text>();
    inputData1.put(student1, new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(student2, new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
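Student here is a custom composite key holding Text, IntWritable, LongWritable, and DoubleWritable fields. The project's class is not shown; a minimal sketch of how such a WritableComparable delegates serialization to its IntWritable (and sibling) fields:

  // Hypothetical sketch; the real Student class's fields and ordering may differ.
  public class StudentSketch implements WritableComparable<StudentSketch> {
    private Text name = new Text();
    private IntWritable id = new IntWritable();

    public void write(DataOutput out) throws IOException {
      name.write(out);
      id.write(out);
    }

    public void readFields(DataInput in) throws IOException {
      name.readFields(in);
      id.readFields(in);
    }

    public int compareTo(StudentSketch other) {
      // Delegates ordering to the IntWritable field's own compareTo.
      return id.compareTo(other.id);
    }
  }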

Examples of org.apache.hadoop.io.IntWritable

 
  @Test
  public void testMergeByIntWritableKeyWithSequenceFileInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new IntWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new IntWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new IntWritable(2), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new IntWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");
 

    String[] args = new String[] {
        "-newPath""/input1",
        "-oldPath""/input2",
        "-mergeBy""key",
        "-outputPath", "output",
        "-inputFormat", "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
        "-inputKeyClassName", "org.apache.hadoop.io.IntWritable",
        "-inputValueClassName", "org.apache.hadoop.io.Text" };
    MergeJob job = runMergeJobs(args);
    assertEquals(3, job.getTotalRecordsNew());
    assertEquals(3, job.getTotalRecordsOld());
    assertEquals(0, job.getBadRecords());
    assertEquals(4, job.getOutput());

    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<IntWritable> expectedOutput = new ArrayList<IntWritable>();
    expectedOutput.add(new IntWritable(1));
    expectedOutput.add(new IntWritable(2));
    expectedOutput.add(new IntWritable(3));
    expectedOutput.add(new IntWritable(4));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
      logger.debug("key and value is: " + writableKey + ", " + writableValue);
      assertTrue("Matched output " + writableKey, expectedOutput.contains(writableKey));
      count++;
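The excerpt cuts off inside the read loop; the tail of the test presumably closes the loop, checks the total, and releases the reader. A hedged sketch:

    }
    // Presumed tail (not shown above): four merged records expected.
    assertEquals(4, count);
    reader.close();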

Examples of org.apache.hadoop.io.IntWritable

  }

  @Test
  public void testMergeByValueWithSequenceFileInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new IntWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new IntWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new IntWritable(2), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new IntWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");
   

    String[] args = new String[] {
        "-newPath""/input1",

Examples of org.apache.hadoop.io.IntWritable

  }
 
  @Test
  public void testMergeByValueWithSequenceFileAsTextInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new IntWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new IntWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new IntWritable(2), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new IntWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] {
        "-newPath", "/input1",
        "-oldPath""/input2",
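The difference from the previous test is the input format: SequenceFileAsTextInputFormat converts each stored key and value to its Text form (via toString()) before the mapper sees it, so an IntWritable key of 1 arrives as the Text "1". A sketch of the assumed job wiring:

  // Assumed wiring; the merge job's actual configuration is not shown here.
  Job job = Job.getInstance(new Configuration());
  job.setInputFormatClass(SequenceFileAsTextInputFormat.class);
  // The mapper then receives Text/Text pairs regardless of the Writable
  // types originally stored in the sequence file.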

Examples of org.apache.hadoop.io.IntWritable

  }

  @Test
  public void testReducerForIntWritableKeyAndValue() throws IOException,
      InterruptedException {
    IntWritable key = new IntWritable(123);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    IntWritable value1 = new IntWritable(456);
    ArrayList<IntWritable> values = new ArrayList<IntWritable>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
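This excerpt also stops short; reducers under test like this one typically need the mocked context to hand back a real Counter. A hedged sketch of that stubbing, with hypothetical group and counter names:

  // Hypothetical stubbing; group and counter names are placeholders.
  when(context.getCounter(anyString(), anyString()))
      .thenReturn(counters.findCounter("hiho", "reducerRecords"));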
