Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.Text
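
The snippets below come from test code that uses org.apache.hadoop.io.Text as MapReduce keys and values. For orientation, here is a minimal, self-contained sketch of the core Text API those tests rely on (construction, reuse via set(), and the Writable serialization round trip); everything in it is standard Hadoop, nothing project-specific.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;

public class TextBasics {
  public static void main(String[] args) throws IOException {
    // Text is a mutable Writable wrapping a UTF-8 encoded byte array.
    Text t = new Text("Macon Kent");
    t.set("Dale Zamora");                 // reuse the same instance

    // Writable round trip: serialize to bytes and read back.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    t.write(new DataOutputStream(bytes));

    Text copy = new Text();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

    System.out.println(copy + " equals original: " + copy.equals(t)); // true
  }
}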


  }

  @Test
  public void testMergeByValueWithSequenceFileInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new IntWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new IntWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new IntWritable(2), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new IntWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");
   

    String[] args = new String[] {
        "-newPath", "/input1",
        "-oldPath", "/input2",
        "-mergeBy", "value",
        "-outputPath", "output",
        "-inputFormat", "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
        "-inputKeyClassName", "org.apache.hadoop.io.IntWritable",
        "-inputValueClassName", "org.apache.hadoop.io.Text" };
    MergeJob job = runMergeJobs(args);
    assertEquals(3, job.getTotalRecordsNew());
    assertEquals(3, job.getTotalRecordsOld());
    assertEquals(0, job.getBadRecords());
    assertEquals(5, job.getOutput());
   
    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<Text> expectedOutput = new ArrayList<Text>();
    expectedOutput.add(new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    expectedOutput.add(new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    expectedOutput.add(new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    expectedOutput.add(new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    expectedOutput.add(new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
      logger.debug("key and value is: " + writableKey + ", " + writableValue);
      assertTrue("Matched output " + writableValue, expectedOutput.contains(writableValue));
      count++;
View Full Code Here
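
The snippet above is cut off inside its verification loop. As a hedged, self-contained sketch of the same readback pattern (readValues is a hypothetical helper, not part of the original tests): open a SequenceFile.Reader, instantiate key and value objects from the file's metadata via ReflectionUtils, and iterate with reader.next().

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceFileReadback {
  // Hypothetical helper: read every record and return the values.
  // Assumes the value class of the file is Text, as in the test above.
  public static List<Text> readValues(FileSystem fs, Path path, Configuration conf)
      throws IOException {
    List<Text> values = new ArrayList<Text>();
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
    try {
      Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
      Text value = (Text) ReflectionUtils.newInstance(reader.getValueClass(), conf);
      while (reader.next(key, value)) {
        values.add(new Text(value)); // copy: reader.next() reuses the same objects
      }
    } finally {
      reader.close();
    }
    return values;
  }
}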


  }
 
  @Test
  public void testMergeByValueWithSequenceFileAsTextInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(new IntWritable(2), new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(new IntWritable(3), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    inputData2.put(new IntWritable(2), new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(new IntWritable(4), new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] {
        "-newPath", "/input1",
        "-oldPath", "/input2",
View Full Code Here
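
Both tests above seed HDFS through createSequenceFileInHdfs, a project test helper whose body is not shown here. A plausible sketch of what such a helper does with the stock SequenceFile.createWriter API (the method name writeSequenceFile and its signature are assumptions, not the original helper):

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileWriteSketch {
  // Hypothetical stand-in for the createSequenceFileInHdfs helper:
  // write each (IntWritable, Text) entry of the map into one SequenceFile.
  public static void writeSequenceFile(FileSystem fs, Configuration conf,
      Path file, Map<IntWritable, Text> data) throws IOException {
    SequenceFile.Writer writer =
        SequenceFile.createWriter(fs, conf, file, IntWritable.class, Text.class);
    try {
      for (Map.Entry<IntWritable, Text> entry : data.entrySet()) {
        writer.append(entry.getKey(), entry.getValue());
      }
    } finally {
      writer.close();
    }
  }
}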

    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.BAD_RECORD);
    when(context.getCounter(MergeRecordCounter.BAD_RECORD)).thenReturn(
        counter);
    MergeKeyMapper mapper = new MergeKeyMapper();
    Text val = new Text("valueOfKey");
    mapper.map(null, val, context);
  }
View Full Code Here

        .findCounter(MergeRecordCounter.TOTAL_RECORDS_NEW);
    when(context.getCounter(MergeRecordCounter.TOTAL_RECORDS_NEW))
        .thenReturn(counter);

    MergeKeyMapper mapper = new MergeKeyMapper();
    Text key = new Text("abc123");
    Text val = new Text("valueOfKey");
    mapper.isOld = false;
    mapper.map(key, val, context);

    HihoValue hihoValue = new HihoValue();
    hihoValue.setVal(val);
View Full Code Here
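
The MergeKeyMapper fragments above follow a common pattern for unit-testing Hadoop mappers without a cluster: mock the Mapper.Context with Mockito, but back getCounter() with a real Counter from a Counters instance so increments can be asserted afterwards. A self-contained sketch of just that idiom (DemoCounter is a hypothetical enum standing in for MergeRecordCounter):

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Mapper;
import org.junit.Test;

public class TestCounterStubbing {

  // Hypothetical counter enum, standing in for MergeRecordCounter.
  enum DemoCounter { TOTAL_RECORDS_NEW }

  @Test
  public void testStubbedCounterIsLive() throws Exception {
    // Mock the Mapper.Context, but have getCounter() return a real Counter,
    // so code under test can increment it and the test can read the value.
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DemoCounter.TOTAL_RECORDS_NEW);
    when(context.getCounter(DemoCounter.TOTAL_RECORDS_NEW)).thenReturn(counter);

    // Anything calling context.getCounter(...).increment(n) now updates the
    // real counter; here the increment is done directly for illustration.
    context.getCounter(DemoCounter.TOTAL_RECORDS_NEW).increment(1);

    assertEquals(1L, counter.getValue());
  }
}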

public class TestDedupValueReducer {
  @Test
  public void testReducerValidValues() throws IOException,
      InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    String value1 = "value1";
    String value2 = "value2";
View Full Code Here

  }

  @Test
  public void testReducerForNullValues() throws IOException,
      InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    String value1 = null;
    ArrayList<String> values = new ArrayList<String>();
View Full Code Here

      InterruptedException {
    LongWritable key = new LongWritable(Long.parseLong("123"));
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    Text value1 = new Text("value1");
    ArrayList<Text> values = new ArrayList<Text>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
View Full Code Here

    Counter counter = counters
        .findCounter(DedupRecordCounter.TOTAL_RECORDS_READ);
    when(context.getCounter(DedupRecordCounter.TOTAL_RECORDS_READ))
        .thenReturn(counter);
    DedupKeyMapper<Text, String> mapper = new DedupKeyMapper<Text, String>();
    Text key = new Text("abc123");
    String val = "valueOfKey";
    mapper.map(key, val, context);

    HihoTuple<Text> hihoTuple = new HihoTuple<Text>();
    hihoTuple.setKey(key);
View Full Code Here

public class TestHihoTuple {

  @Test
  public void testSetKey() throws IOException {
    Text key = new Text("abc123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);
    assertEquals(key, hihoTuple.getKey());
    assertEquals(HashUtility.getMD5Hash(key), hihoTuple.getHash());
  }
View Full Code Here

    assertEquals(HashUtility.getMD5Hash(key), hihoTuple.getHash());
  }

  @Test
  public void testSetMD5Hash() throws IOException {
    Text key = new Text("abc123");
    MD5Hash hash = HashUtility.getMD5Hash(key);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setHash(hash);
    assertEquals(hash, hihoTuple.getHash());
  }
View Full Code Here
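
HashUtility.getMD5Hash is project-specific, but an MD5 digest of a Writable key can be computed with stock Hadoop classes by serializing the key and digesting exactly the bytes written. A sketch under the assumption that HashUtility does something equivalent:

import java.io.IOException;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class Md5OfWritable {
  // Hash a Writable by serializing it and digesting only the bytes written.
  // Assumption: HashUtility.getMD5Hash works along these lines.
  public static MD5Hash md5Of(Writable key) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    key.write(out);
    return MD5Hash.digest(out.getData(), 0, out.getLength());
  }

  public static void main(String[] args) throws IOException {
    System.out.println(md5Of(new Text("abc123"))); // equal keys give equal digests
  }
}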
