Examples of HadoopInput
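HadoopInput adapts a file identified by a Hadoop Path and Configuration to Trevni's Input interface, which is how column-file readers such as ColumnFileReader and AvroColumnReader get at Trevni data stored in a Hadoop FileSystem. The excerpts below show the construction pattern new HadoopInput(path, conf) as it appears in several open-source projects.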


Examples of org.apache.trevni.avro.HadoopInput

  private int [] projectionMap;
  private ColumnValues [] columns;

  public TrevniScanner(Configuration conf, TableMeta meta, Fragment fragment) throws IOException {
    super(conf, meta, fragment);
    // Open the fragment's Trevni file through a HadoopInput-backed ColumnFileReader.
    reader = new ColumnFileReader(new HadoopInput(fragment.getPath(), conf));
  }
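For context, here is a minimal self-contained sketch of the same pattern. It assumes a Trevni file already exists at the placeholder path and that ColumnFileReader exposes getRowCount(), getColumnCount(), and close(); the class name TrevniRowCount is invented for this example.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.trevni.ColumnFileReader;
import org.apache.trevni.avro.HadoopInput;

public class TrevniRowCount {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Placeholder path; point this at an existing Trevni file.
    Path path = new Path("/tmp/table.trv");

    // HadoopInput adapts the Hadoop path to Trevni's Input interface.
    ColumnFileReader reader = new ColumnFileReader(new HadoopInput(path, conf));
    try {
      System.out.println("rows:    " + reader.getRowCount());
      System.out.println("columns: " + reader.getColumnCount());
    } finally {
      reader.close();
    }
  }
}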

Examples of org.apache.trevni.avro.HadoopInput

          public void initialize(final InputSplit isplit,
              final TaskAttemptContext tac)
                  throws IOException, InterruptedException {
            fsplit = (FileSplit) isplit;
            // Wrap the split's path in a HadoopInput and hand it to AvroColumnReader.
            params = new AvroColumnReader.Params(
                new HadoopInput(fsplit.getPath(), tac.getConfiguration()));
            Schema inputSchema = getInputAvroSchema();
            params.setSchema(inputSchema);
            reader = new AvroColumnReader<GenericData.Record>(params);
            rows = reader.getRowCount();
          }
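The same Params/AvroColumnReader combination also works outside of a RecordReader. A sketch, assuming an Avro-written Trevni file at the placeholder path; getFileSchema(), getRowCount(), hasNext(), and close() all appear in the excerpts on this page, while next() comes from AvroColumnReader acting as an Iterator over the decoded records. The class name ReadTrevniRecords is invented for this example.

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.trevni.avro.AvroColumnReader;
import org.apache.trevni.avro.HadoopInput;

public class ReadTrevniRecords {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Placeholder path; point this at an existing Avro/Trevni file.
    Path path = new Path("/tmp/part-00000.trv");

    AvroColumnReader.Params params =
        new AvroColumnReader.Params(new HadoopInput(path, conf));
    AvroColumnReader<GenericData.Record> reader =
        new AvroColumnReader<GenericData.Record>(params);
    try {
      Schema schema = reader.getFileSchema();  // schema stored in the file
      System.out.println("schema: " + schema);
      System.out.println("rows:   " + reader.getRowCount());
      while (reader.hasNext()) {               // iterate over the decoded records
        GenericData.Record record = reader.next();
        System.out.println(record);
      }
    } finally {
      reader.close();
    }
  }
}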

Examples of org.apache.trevni.avro.HadoopInput

      throw new IOException("No path matches pattern " + p.toString());
    }

    // Open the first matching file just long enough to read its schema.
    AvroColumnReader.Params params =
        new AvroColumnReader.Params(
            new HadoopInput(filePath, job.getConfiguration()));
    AvroColumnReader<GenericData.Record> reader =
        new AvroColumnReader<GenericData.Record>(params);
    Schema s = reader.getFileSchema();
    reader.close();
    return s;
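In this pattern the reader is opened only long enough to pull the schema stored in the Trevni file via getFileSchema() and is closed immediately; the returned Schema can then be passed to Params.setSchema(...) when configuring the record-reading code shown above.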

Examples of org.apache.trevni.avro.HadoopInput

  @Override
  public Iterator<T> read(FileSystem fs, final Path path) {
    this.mapFn.initialize();
    try {
      // Adapt the path to Trevni's Input interface for the Avro column reader.
      HadoopInput input = new HadoopInput(path, fs.getConf());
      final AvroColumnReader<T> reader = getReader(input, aType, schema);
      return new AutoClosingIterator<T>(reader, new UnmodifiableIterator<T>() {
        @Override
        public boolean hasNext() {
          return reader.hasNext();
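The excerpt is cut off before the anonymous iterator's next() method, which presumably delegates to reader.next(). Note that the reader is also handed to AutoClosingIterator as the Closeable, so the underlying HadoopInput-backed file can be closed automatically once iteration completes.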

Examples of org.apache.trevni.avro.HadoopInput

  private int [] projectionMap;
  private ColumnValues [] columns;

  public TrevniScanner(Configuration conf, Schema schema, TableMeta meta, FileFragment fragment) throws IOException {
    super(conf, schema, meta, fragment);
    reader = new ColumnFileReader(new HadoopInput(fragment.getPath(), conf));
  }

Examples of org.apache.trevni.avro.HadoopInput

            @Override
            public void initialize(InputSplit isplit, TaskAttemptContext tac) throws IOException, InterruptedException {
              fsplit = (FileSplit) isplit;
              params = new AvroColumnReader.Params(
                  new HadoopInput(fsplit.getPath(), tac.getConfiguration()));
              Schema inputSchema = getInputAvroSchema();
              System.err.printf("initializing RecordReader with schema %s\n", inputSchema);
              params.setSchema(inputSchema);
              reader = new AvroColumnReader<GenericData.Record>(params);
              rows = reader.getRowCount();
            }