Package org.apache.drill.exec.record

Examples of org.apache.drill.exec.record.MaterializedField$Key
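MaterializedField describes a single column in a Drill record batch: a SchemaPath naming the field plus a MajorType (minor type and data mode). The excerpts below exercise the outer MaterializedField class, showing the common ways an instance is obtained: rebuilt from a SerializedField during deserialization, derived from a materialized expression's type, or constructed directly. As a minimal sketch of direct construction (assuming an allocator is in scope; the column name "n_name" is made up for illustration):

    // build a field descriptor by hand, then ask TypeHelper for a matching vector
    MaterializedField field = MaterializedField.create(
        SchemaPath.getSimplePath("n_name"),            // field path
        Types.required(TypeProtos.MinorType.VARCHAR)); // non-nullable VARCHAR
    ValueVector vector = TypeHelper.getNewVector(field, allocator);

The first excerpt appears to come from a map vector's load(...): each child described by the SerializedField metadata is rebuilt with MaterializedField.create(fmd), matched to an existing child vector by its last name, and either cleared (when empty) or loaded from its slice of the incoming buffer.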


    List<SerializedField> fields = metadata.getChildList();

    int bufOffset = offsets.load(metadata.getValueCount()+1, buf);

    for (SerializedField fmd : fields) {
      MaterializedField fieldDef = MaterializedField.create(fmd);
      ValueVector v = vectors.get(fieldDef.getLastName());
      if (v == null) {
        // if we arrive here, we didn't have a matching vector.
        v = TypeHelper.getNewVector(fieldDef, allocator);
      }
      if (fmd.getValueCount() == 0) {
        v.clear();
      } else {
        v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
      }
      bufOffset += fmd.getBufferLength();
      put(fieldDef.getLastName(), v);
    }
  }
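Building a transfer pair for a map: the constructor derives the target MapVector's MaterializedField from the supplied SchemaPath and the source field's minor type, then prepares one TransferPair per child vector (the excerpt ends inside the loop over the children).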


    private TransferPair[] pairs;
    private MapVector to;

    public SingleMapTransferPair(SchemaPath path) {

      MaterializedField mf = MaterializedField.create(path, Types.required(field.getType().getMinorType()));
      MapVector v = new MapVector(mf, allocator);
      pairs = new TransferPair[vectors.size()];
      int i = 0;
      for (Map.Entry<String, ValueVector> e : vectors.entrySet()) {
        TransferPair otherSide = e.getValue().getTransferPair();
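A close variant of the first excerpt, likely MapVector.load(...): the same create/look-up/load pattern, except that a newly created child vector is registered with put(...) right away and the buffer offset starts at zero rather than after an offsets vector.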

    List<SerializedField> fields = metadata.getChildList();
    valueCount = metadata.getValueCount();

    int bufOffset = 0;
    for (SerializedField fmd : fields) {
      MaterializedField fieldDef = MaterializedField.create(fmd);

      ValueVector v = vectors.get(fieldDef.getLastName());
      if (v == null) {
        // if we arrive here, we didn't have a matching vector.
        v = TypeHelper.getNewVector(fieldDef, allocator);
        put(fieldDef.getLastName(), v);
      }
      if (fmd.getValueCount() == 0) {
        v.clear();
      } else {
        v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
      }
      // as in the excerpt above, advance past this child's data
      bufOffset += fmd.getBufferLength();
    }
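Loading a repeated list vector: the offsets vector is populated first, then the single child field in the metadata is materialized into a new inner vector if one does not already exist; the child is cleared or loaded just as in the map case.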

  public void load(SerializedField metadata, DrillBuf buf) {
    SerializedField childField = metadata.getChildList().get(0);

    int bufOffset = offsets.load(metadata.getValueCount()+1, buf);

    MaterializedField fieldDef = MaterializedField.create(childField);
    if (vector == null) {
      setVector(TypeHelper.getNewVector(fieldDef, allocator));
    }

    if (childField.getValueCount() == 0) {
      vector.clear();
    } else {
      // load the child from its slice of the buffer, mirroring the map case above
      vector.load(childField, buf.slice(bufOffset, childField.getBufferLength()));
    }
  }
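On the JDBC side, MaterializedField drives ResultSet metadata: each column's nullability, signedness, label, precision, and scale are read off the field's MajorType and schema path (the excerpt ends partway through the ColumnMetaData constructor call).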

  public void updateColumnMetaData(String catalogName, String schemaName, String tableName, BatchSchema schema){

    columns = new ColumnMetaData[schema.getFieldCount()];
    for(int i = 0; i < schema.getFieldCount(); i++){
      MaterializedField f = schema.getColumn(i);
      MajorType t = f.getType();
      ColumnMetaData col = new ColumnMetaData( //
          i, // ordinal
          false, // autoIncrement
          true, // caseSensitive
          false, // searchable
          false, // currency
          f.getDataMode() == DataMode.OPTIONAL ? ResultSetMetaData.columnNullable : ResultSetMetaData.columnNoNulls, //nullability
          !Types.isUnSigned(t), // signed
          10, // display size.
          f.getAsSchemaPath().getRootSegment().getPath(), // label
          f.getAsSchemaPath().getRootSegment().getPath(), // columnname
          schemaName, // schemaname
          t.hasPrecision() ? t.getPrecision() : 0, // precision
          t.hasScale() ? t.getScale() : 0, // scale
          null, // tablename is null so sqlline doesn't try to retrieve primary keys.
          catalogName, // catalogname
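Deserializing a batch from a stream: for each SerializedField in the batch definition, the MaterializedField is recreated, a buffer of the advertised length is allocated (failing with an OutOfMemoryException if the allocator cannot supply it), and the vector's bytes are read from the input.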

    }
    List<ValueVector> vectorList = Lists.newArrayList();
    List<SerializedField> fieldList = batchDef.getFieldList();
    for (SerializedField metaData : fieldList) {
      int dataLength = metaData.getBufferLength();
      MaterializedField field = MaterializedField.create(metaData);
      DrillBuf buf = allocator.buffer(dataLength);
      if (buf == null) {
        throw new IOException(new OutOfMemoryException());
      }
      buf.writeBytes(input, dataLength);
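During projection setup, each output column's MaterializedField is created from the materialized expression's MajorType; the resulting value vector is added to the outgoing container and wired into a generated ValueVectorWriteExpression.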

              final LogicalExpression expr = ExpressionTreeMaterializer.materialize(originalPath, incoming, collector, context.getFunctionRegistry() );
              if(collector.hasErrors()){
                throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
              }             

              MaterializedField outputField = MaterializedField.create(name, expr.getMajorType());
              ValueVector vv = TypeHelper.getNewVector(outputField, oContext.getAllocator());
              allocationVectors.add(vv);
              TypedFieldId fid = container.add(vv);
              ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
              HoldingContainer hc = cg.addExpr(write);

              cg.getEvalBlock()._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
            }
          }
          continue;
        }
      }

      String outputName = getRef(namedExpression).getRootSegment().getPath();
      if (result != null && result.outputNames != null && result.outputNames.size() > 0) {
        for (int j = 0; j < result.outputNames.size(); j++) {
          if (!result.outputNames.get(j).equals(EMPTY_STRING)) {
            outputName = result.outputNames.get(j);
            break;
          }
        }
      }
     
      final LogicalExpression expr = ExpressionTreeMaterializer.materialize(namedExpression.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
      final MaterializedField outputField = MaterializedField.create(outputName, expr.getMajorType());
      if(collector.hasErrors()){
        throw new SchemaChangeException(String.format("Failure while trying to materialize incoming schema.  Errors:\n %s.", collector.toErrorString()));
      }

      // add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
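Reusing an existing container when the incoming schema is unchanged: each column's MaterializedField supplies the type and path needed to locate the matching vectors in both containers, and a TransferPair then moves the data across without copying.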

    VectorContainer newContainer = batch.getContainer();
    if (schema != null && newContainer.getSchema().equals(schema)) {
      container.zeroVectors();
      BatchSchema schema = container.getSchema(); // local; shadows the schema field checked above
      for (int i = 0; i < container.getNumberOfColumns(); i++) {
        MaterializedField field = schema.getColumn(i);
        MajorType type = field.getType();
        ValueVector vOut = container.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                container.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        ValueVector vIn = newContainer.getValueAccessorById(TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()),
                newContainer.getValueVectorId(field.getPath()).getFieldIds()).getValueVector();
        TransferPair tp = vIn.makeTransferPair(vOut);
        tp.transfer();
      }
      return false;
    } else {
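Finally, a small private helper that builds a MaterializedField from a simple (single-segment) name and a MajorType. Despite its name it returns the field descriptor rather than a vector, and the length parameter is unused in this excerpt.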

  private MaterializedField getVector(String name, MajorType type, int length) {
    assert context != null : "Context shouldn't be null.";
    // despite the method name, this builds and returns the field descriptor
    MaterializedField f = MaterializedField.create(SchemaPath.getSimplePath(name), type);
    return f;
  }
