Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.Text
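Text is Hadoop's Writable wrapper for UTF-8 strings: a mutable, reusable buffer that stands in for java.lang.String as a MapReduce key or value type. As a quick orientation before the project snippets below, here is a minimal standalone sketch of the core API (the class name TextBasics is only for illustration):

    import org.apache.hadoop.io.Text;

    public class TextBasics {
      public static void main(String[] args) {
        Text t = new Text("hello");
        t.set("hello world");                 // Text is mutable: reuse one instance across records
        System.out.println(t.getLength());    // 11: length in bytes of the UTF-8 encoding
        System.out.println(t.find("world"));  // 6: byte offset of the substring, -1 if absent
        System.out.println(t.toString());     // decode back to a java.lang.String
      }
    }

The snippets that follow appear to come from the HIHO (Hadoop In, Hadoop Out) test suite and show Text used as mapper input, as a HihoTuple key, and as data in end-to-end dedup jobs.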


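DedupValueMapper with a valid key/value pair: the mapper wraps the value in a HihoTuple and emits it as the new key, with the original key as the value, so downstream grouping deduplicates by value.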
    // the mocked Mapper.Context is created earlier in the full test and is
    // restored here so the snippet stands alone
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters
        .findCounter(DedupRecordCounter.TOTAL_RECORDS_READ);
    when(context.getCounter(DedupRecordCounter.TOTAL_RECORDS_READ))
        .thenReturn(counter);
    DedupValueMapper mapper = new DedupValueMapper();
    Text key = new Text("abc123");
    Text val = new Text("valueOfKey");
    mapper.map(key, val, context);

    // dedup-by-value: the value becomes the HihoTuple key and the original
    // key is emitted as the value
    HihoTuple<Text> hihoTuple = new HihoTuple<Text>();
    hihoTuple.setKey(val);
    verify(context).write(hihoTuple, key);


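The same mapper with a null key: the test stubs the BAD_RECORD counter so the mapper can record the malformed input.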
    // the mocked Mapper.Context is created earlier in the full test and is
    // restored here so the snippet stands alone
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.BAD_RECORD);
    when(context.getCounter(DedupRecordCounter.BAD_RECORD)).thenReturn(
        counter);
    DedupValueMapper mapper = new DedupValueMapper();
    Text val = new Text("valueOfKey");
    // a null key is treated as a bad record
    mapper.map(null, val, context);
  }

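TestDedupKeyReducer builds HihoTuple keys around Text and LongWritable values and feeds them to the reducer through a mocked Reducer.Context: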
public class TestDedupKeyReducer {

  @Test
  public void testReducerValidValues() throws IOException,
      InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    String value1 = "value1";
    String value2 = "value2";
    // ... (remainder truncated in the listing)

  // the listing starts mid-signature; the annotation and method name are
  // restored by analogy with the other tests (the original name is not shown)
  @Test
  public void testReducerForLongWritableKey() throws IOException,
      InterruptedException {
    LongWritable key = new LongWritable(123L);
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    Text value1 = new Text("value1");
    ArrayList<Text> values = new ArrayList<Text>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    // ... (remainder truncated in the listing)

  }

  @Test
  public void testReducerForNullValues() throws IOException,
      InterruptedException {
    Text key = new Text("key123");
    HihoTuple hihoTuple = new HihoTuple();
    hihoTuple.setKey(key);

    String value1 = null;
    ArrayList<String> values = new ArrayList<String>();
    values.add(value1);
    // ... (remainder truncated in the listing)

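ExportSalesforceMapper tests: the mapper hands an FSDataInputStream to a mocked Salesforce handler, which returns a BatchInfo whose id the mapper copies into a reusable Text.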
    // mockSFHandler is the mocked Salesforce handler created earlier in the
    // full test; the context mock and BatchInfo setup are restored here
    Mapper.Context context = mock(Mapper.Context.class);
    BatchInfo batchInfo = new BatchInfo();
    String id = "id";
    batchInfo.setId(id);
    when(mockSFHandler.createBatch(any(FSDataInputStream.class),
        any(BulkConnection.class), any(JobInfo.class))).thenReturn(batchInfo);
    ExportSalesforceMapper mapper = new ExportSalesforceMapper();
    mapper.setSfHandler(mockSFHandler);
    Text batchId = new Text(id);
    mapper.setBatchId(batchId);
    mapper.map(null, null, context);
  }

   
    // the ExportSalesforceMapper instance and the mocked Salesforce handler
    // (mockSFHandler) are created earlier in the full test
    Mapper.Context context = mock(Mapper.Context.class);
    BatchInfo batchInfo = new BatchInfo();
    String id = "id";
    batchInfo.setId(id);
    Text batchId = new Text();
    mapper.setBatchId(batchId);

    JobInfo info = new JobInfo();
    String id1 = "id1";
    info.setId(id1);
    when(mockSFHandler.createBatch(any(FSDataInputStream.class),
        any(BulkConnection.class), any(JobInfo.class))).thenReturn(batchInfo);
    mapper.setSfHandler(mockSFHandler);
    mapper.setJob(info);
    mapper.setJobId(new Text(id1));
    mapper.map(new Text("abc"), mock(FSDataInputStream.class), context);
    // Text is mutable, so the mapper fills the same batchId instance in place
    assertEquals(id, batchId.toString());
    verify(context, times(1)).write(new Text(info.getId()), batchId);
  }

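OracleLoadMapper test: each key/value pair is appended to a remote file over FTP, with the Text key supplying the file name.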
  @Test
  public final void testMapper() throws Exception {
    Mapper.Context context = mock(Mapper.Context.class);
    OracleLoadMapper mapper = new OracleLoadMapper();
    FTPClient ftpClient = mock(FTPClient.class);
    FSDataInputStream val = mock(FSDataInputStream.class);
    Text key = new Text("key");
    mapper.setFtpClient(ftpClient);
    mapper.map(key, val, context);
    // each value is appended to a remote file named after the Text key
    verify(ftpClient).appendFile("key", val);
  }

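The remaining snippets are end-to-end job tests: they write SequenceFiles of IntWritable/Text pairs into HDFS and run the dedup job over them.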
  }
 
  @Test
  public void testDedupByIntWritableKeyWithSequenceFileInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Xavier Wilson,Mason Holloway,Carlos Johnston,Martin Noel,Drake Mckinney"));
    inputData1.put(new IntWritable(2), new Text("Kennedy Bailey,Jerome Perry,David Cabrera,Edan Fleming,Orlando Tyson"));
    inputData1.put(new IntWritable(3), new Text("Drake Mckinney,Murphy Baird,Theodore Lindsey,Nehru Wilcox,Harper Klein"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Zephania Bauer,Jermaine Gordon,Vincent Moon,Steven Pierce,Jasper Campos"));
    inputData2.put(new IntWritable(2), new Text("Kennedy Bailey,Plato Atkinson,Stuart Guy,Rooney Levy,Judah Benson"));
    inputData2.put(new IntWritable(4), new Text("Drake Mckinney,Murphy Baird,Theodore Lindsey,Nehru Wilcox,Harper Klein"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");
   
    String[] args = new String[] {
        "-inputFormat", "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
        "-inputPath", "/input1,/input2",
        // ... (the remaining arguments and job assertions are truncated in the listing)

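Deduplicating by value: the six input records contain one duplicate value, so five records survive the job.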
  }
 
  @Test
  public void testDedupByValueWithSequenceFileInputFormat() throws Exception {
    HashMap<IntWritable, Text> inputData1 = new HashMap<IntWritable, Text>();
    inputData1.put(new IntWritable(1), new Text("Xavier Wilson,Mason Holloway,Carlos Johnston,Martin Noel,Drake Mckinney"));
    inputData1.put(new IntWritable(2), new Text("Kennedy Bailey,Jerome Perry,David Cabrera,Edan Fleming,Orlando Tyson"));
    inputData1.put(new IntWritable(3), new Text("Drake Mckinney,Murphy Baird,Theodore Lindsey,Nehru Wilcox,Harper Klein"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");
   
    HashMap<IntWritable, Text> inputData2 = new HashMap<IntWritable, Text>();
    inputData2.put(new IntWritable(1), new Text("Zephania Bauer,Jermaine Gordon,Vincent Moon,Steven Pierce,Jasper Campos"));
    inputData2.put(new IntWritable(2), new Text("Kennedy Bailey,Plato Atkinson,Stuart Guy,Rooney Levy,Judah Benson"));
    inputData2.put(new IntWritable(4), new Text("Drake Mckinney,Murphy Baird,Theodore Lindsey,Nehru Wilcox,Harper Klein"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");
   
    String[] args = new String[] {
        "-inputFormat", "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
        "-inputPath", "/input1,/input2",
        "-outputPath", "output",
        "-inputKeyClassName", "org.apache.hadoop.io.IntWritable",
        "-inputValueClassName", "org.apache.hadoop.io.Text",
        "-dedupBy", "value" };
    DedupJob job = runDedupJob(args);
    assertEquals(6, job.getTotalRecordsRead());
    assertEquals(0, job.getBadRecords());
    assertEquals(5, job.getOutput());
    assertEquals(1, job.getDuplicateRecords());
   
   
    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey =
        (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue =
        (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<Text> expectedOutput = new ArrayList<Text>();
    expectedOutput.add(new Text("Xavier Wilson,Mason Holloway,Carlos Johnston,Martin Noel,Drake Mckinney"));
    expectedOutput.add(new Text("Kennedy Bailey,Jerome Perry,David Cabrera,Edan Fleming,Orlando Tyson"));
    expectedOutput.add(new Text("Drake Mckinney,Murphy Baird,Theodore Lindsey,Nehru Wilcox,Harper Klein"));
    expectedOutput.add(new Text("Zephania Bauer,Jermaine Gordon,Vincent Moon,Steven Pierce,Jasper Campos"));
    expectedOutput.add(new Text("Kennedy Bailey,Plato Atkinson,Stuart Guy,Rooney Levy,Judah Benson"));
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
      logger.debug("key and value is: " + writableKey + ", " + writableValue);
      assertTrue("Output was not in the expected set: " + writableValue,
          expectedOutput.contains(writableValue));
      count++;
    }
    // the listing truncates here; the loop close and final count check are restored
    assertEquals(expectedOutput.size(), count);
  }
