Examples of SequenceFileRecordReader


Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

    /**
     * Returns a reader for this split of the src file list.
     */
    public RecordReader getRecordReader(InputSplit split, JobConf job,
                                 Reporter reporter) throws IOException {
      return new SequenceFileRecordReader(job, (FileSplit)split);
    }
View Full Code Here

Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

    /**
     * Returns a reader for this split of the src file list.
     */
    public RecordReader getRecordReader(InputSplit split, JobConf job,
                                 Reporter reporter) throws IOException {
      return new SequenceFileRecordReader(job, (FileSplit)split);
    }
View Full Code Here

Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

      } catch (IOException e) {
        // Wrap as unchecked: the enclosing factory method cannot recover from a
        // malformed/unidentifiable segment, so fail fast with the cause attached.
        throw new RuntimeException("Cannot identify segment:", e);
      }

      try {
        // Anonymous subclass so next() can be instrumented with debug logging
        // without changing the reader's external behavior.
        return new SequenceFileRecordReader(job, (FileSplit)split) {
         
          @Override
          public synchronized boolean next(Writable key, Writable value) throws IOException {
            LOG.debug("Running OIF.next()");
           
View Full Code Here

Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

      } catch (IOException e) {
        // Segment identification failed; surface it as unchecked with the
        // original IOException preserved as the cause.
        throw new RuntimeException("Cannot identify segment:", e);
      }

      try {
        // Anonymous subclass: adds debug logging around next() while keeping
        // the standard SequenceFileRecordReader read semantics.
        return new SequenceFileRecordReader(job, (FileSplit)split) {
         
          @Override
          public synchronized boolean next(Writable key, Writable value) throws IOException {
            LOG.debug("Running OIF.next()");
           
View Full Code Here

Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

      if (((RecordReader) reader) instanceof LineRecordReader
          || (RecordReader) reader instanceof SequenceFileRecordReader) {

        // A SequenceFile's key type is declared by the file itself (e.g. BytesWritable),
        // while LineRecordReader always uses LongWritable offsets as keys.
        if ((RecordReader) reader instanceof SequenceFileRecordReader) {
          SequenceFileRecordReader sReader = (SequenceFileRecordReader) reader;
          // Instantiate the file's declared key class reflectively so next()
          // receives a key of the correct runtime type.
          key = (Writable) sReader.getKeyClass().newInstance();
        } else {
          key = new LongWritable();
        }
        // Read records until exhaustion or the preview cap is reached.
        while (reader.next(key, textValue)
            && count < MAX_RECORD_TO_READ) {
View Full Code Here

Examples of org.apache.hadoop.mapred.SequenceFileRecordReader

      if (((RecordReader) reader) instanceof LineRecordReader
          || (RecordReader) reader instanceof SequenceFileRecordReader) {

        // A SequenceFile's key type is declared by the file (e.g. BytesWritable),
        // whereas LineRecordReader's key is a LongWritable byte offset.
        if ((RecordReader) reader instanceof SequenceFileRecordReader) {
          SequenceFileRecordReader sReader = (SequenceFileRecordReader) reader;
          try {
            // Reflective instantiation of the declared key class; may throw
            // e.g. InstantiationException/IllegalAccessException.
            key = (Writable) sReader.getKeyClass().newInstance();
          } catch (Exception e) {
            log("partition download error!", e);
            // User-facing response body ("partition download failed") — kept verbatim.
            resp.getWriter().write("分区下载失败");
            return;
          }
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader

    assertFalse(underTest.shouldContinue());
  }
 
  // Verifies that getNext() turns one mocked (key, value) pair into a
  // 4-field tuple — key, value, the per-key extraction path, and a flag —
  // and that it returns null once the reader reports no more key/values.
  public void testGetNext() throws IOException, InterruptedException
  {
    // Mock reader yielding exactly one record: ("mykey1", "test123").
    SequenceFileRecordReader reader = EasyMock.createMock(SequenceFileRecordReader.class);
    EasyMock.expect(reader.nextKeyValue()).andReturn(true);
    EasyMock.expect(reader.getCurrentKey()).andReturn(new Text("mykey1"));
    EasyMock.expect(reader.getCurrentValue()).andReturn(new BytesWritable("test123".getBytes()));
    EasyMock.replay(reader);
   
    underTest.script = "src/test/resources/echo.sh";
    underTest.prepareToRead(reader, null);
    Tuple tuple = underTest.getNext();
    assertEquals(4, tuple.size());
    assertEquals(tuple.get(0), new Text("mykey1"));
    assertEquals(tuple.get(1), new BytesWritable("test123".getBytes()));
    // Field 2 is the absolute path derived from dataDir + key, newline-terminated
    // (presumably the script's stdout — TODO confirm against the loader).
    assertEquals(tuple.get(2), new File(underTest.dataDir, "mykey1").getAbsolutePath()+"\n");
    assertEquals(tuple.get(3), false);
   
    // returns null when no more tuples are available
    reader = EasyMock.createMock(SequenceFileRecordReader.class);
    EasyMock.expect(reader.nextKeyValue()).andReturn(false);
    EasyMock.replay(reader);
    underTest.reader = reader;
   
    tuple = underTest.getNext();
    assertNull(tuple);
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader

    assertFalse(underTest.shouldContinue());
  }
 
  // Verifies that getNext() maps one mocked (key, value) pair to a plain
  // 2-field (key, value) tuple, and returns null once the reader is exhausted.
  public void testGetNext() throws IOException, InterruptedException
  {
    // Mock reader yielding exactly one record: ("mykey1", "test123").
    SequenceFileRecordReader reader = EasyMock.createMock(SequenceFileRecordReader.class);
    EasyMock.expect(reader.nextKeyValue()).andReturn(true);
    EasyMock.expect(reader.getCurrentKey()).andReturn(new Text("mykey1"));
    EasyMock.expect(reader.getCurrentValue()).andReturn(new BytesWritable("test123".getBytes()));
    EasyMock.replay(reader);
   
    underTest.prepareToRead(reader, null);
    Tuple tuple = underTest.getNext();
    assertEquals(2, tuple.size());
    assertEquals(tuple.get(0), new Text("mykey1"));
    assertEquals(tuple.get(1), new BytesWritable("test123".getBytes()));
   
    // returns null when no more tuples are available
    reader = EasyMock.createMock(SequenceFileRecordReader.class);
    EasyMock.expect(reader.nextKeyValue()).andReturn(false);
    EasyMock.replay(reader);
    underTest.reader = reader;
   
    tuple = underTest.getNext();
    assertNull(tuple);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.