Examples of ScanBatch


Examples of org.apache.drill.exec.physical.impl.ScanBatch

  // SLF4J logger for this batch creator; fully-qualified names suggest the file avoids slf4j imports.
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(InfoSchemaBatchCreator.class);

  @Override
  public RecordBatch getBatch(FragmentContext context, InfoSchemaSubScan config, List<RecordBatch> children) throws ExecutionSetupException {
    RecordReader rr = config.getTable().getRecordReader(context.getRootSchema());
    return new ScanBatch(config, context, Collections.singleton(rr).iterator());
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

    List<MockScanEntry> entries = config.getReadEntries();
    List<RecordReader> readers = Lists.newArrayList();
    for(MockScanEntry e : entries){
      readers.add(new MockRecordReader(context, e));
    }
    return new ScanBatch(config, context, readers.iterator());
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

  // SLF4J logger for this batch creator; fully-qualified names suggest the file avoids slf4j imports.
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DirectBatchCreator.class);

  @Override
  public RecordBatch getBatch(FragmentContext context, DirectSubScan config, List<RecordBatch> children)
      throws ExecutionSetupException {
    return new ScanBatch(config, context, Collections.singleton(config.getReader()).iterator());
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

      for (int i = 0; i < numParts; i++) {
        selectedPartitionColumns.add(i);
      }
    }

    return new ScanBatch(scan, context, readers.iterator(), partitionColumns, selectedPartitionColumns);
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

      for (int i = 0; i < numParts; i++) {
        selectedPartitionColumns.add(i);
      }
    }

    return new ScanBatch(rowGroupScan, context, readers.iterator(), partitionColumns, selectedPartitionColumns);
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

    if (readers.size() == 0) {
      readers.add(new HiveRecordReader(table, null, null, config.getColumns(), context,
          config.getHiveReadEntry().hiveConfigOverride));
    }

    return new ScanBatch(config, context, readers.iterator());
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

        readers.add(new HBaseRecordReader(subScan.getStorageConfig().getHBaseConf(), scanSpec, columns, context));
      } catch (Exception e1) {
        throw new ExecutionSetupException(e1);
      }
    }
    return new ScanBatch(subScan, context, readers.iterator());
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

      for (int i = 0; i < numParts; i++) {
        selectedPartitionColumns.add(i);
      }
    }

    return new ScanBatch(scan, context, readers.iterator(), partitionColumns, selectedPartitionColumns);
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

      for (int i = 0; i < numParts; i++) {
        selectedPartitionColumns.add(i);
      }
    }

    ScanBatch s = new ScanBatch(rowGroupScan, context, readers.iterator(), partitionColumns, selectedPartitionColumns);

    for(RecordReader r  : readers){
      r.setOperatorContext(s.getOperatorContext());
    }

    return s;
  }
View Full Code Here

Examples of org.apache.drill.exec.physical.impl.ScanBatch

        logger.error(e.getMessage(), e);
        throw new ExecutionSetupException(e);
      }
    }
    logger.info("Number of record readers initialized : " + readers.size());
    return new ScanBatch(subScan, context, readers.iterator());
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.