Package io.netty.buffer

Examples of io.netty.buffer.DrillBuf
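
DrillBuf is Apache Drill's reference-counted buffer class; it is placed in the io.netty.buffer package so that it can build on Netty's ByteBuf internals, and instances are handed out by Drill's BufferAllocator. Before the examples, a minimal lifecycle sketch (illustrative only, assuming a BufferAllocator named "allocator" as in the snippets below; the 256-byte size is arbitrary):

    // Allocate, use, and release a DrillBuf.
    DrillBuf buf = allocator.buffer(256);        // ref count starts at 1
    try {
      buf.setBytes(0, new byte[] { 1, 2, 3 });   // write raw bytes at offset 0
      byte first = buf.getByte(0);               // read them back
    } finally {
      buf.release();                             // drop our reference when finished
    }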


      if (index >= nullableVarBinaryVector.getValueCapacity()) {
        return false;
      }

      if (usingDictionary) {
        DrillBuf b = DrillBuf.wrapByteBuffer(currDictValToWrite.toByteBuffer());
        NullableVarBinaryHolder holder = new NullableVarBinaryHolder();
        holder.buffer = b;
        holder.start = 0;
        holder.end = currDictValToWrite.length();
        holder.isSet = 1;
View Full Code Here
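
The snippet above appears to come from a dictionary-encoded read path: DrillBuf.wrapByteBuffer exposes the current dictionary value's java.nio.ByteBuffer as a DrillBuf, which is then staged in a NullableVarBinaryHolder (buffer, start and end offsets, and the isSet flag). The remainder of the method is cut off here; presumably it hands the populated holder to the vector.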


    List<ValueVector> vectorList = Lists.newArrayList();
    List<SerializedField> fieldList = batchDef.getFieldList();
    for (SerializedField metaData : fieldList) {
      int dataLength = metaData.getBufferLength();
      MaterializedField field = MaterializedField.create(metaData);
      DrillBuf buf = allocator.buffer(dataLength);
      if (buf == null) {
        throw new IOException(new OutOfMemoryException());
      }
      buf.writeBytes(input, dataLength);
      ValueVector vector = TypeHelper.getNewVector(field, allocator);
      vector.load(metaData, buf);
      buf.release();
      vectorList.add(vector);
    }
    container.addCollection(vectorList);
    container.buildSchema(svMode);
    container.setRecordCount(recordCount);
View Full Code Here
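
This deserialization loop allocates one DrillBuf per serialized field, fills it from the input stream with writeBytes, and passes it to a freshly created value vector via load(). The release() immediately afterwards is safe because load() is expected to take its own reference to the buffer, so the local reference can be dropped while the vector keeps the memory alive. The populated vectors are then collected into the container and the schema and record count are restored.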

    DrillBuf[] incomingBuffers = batch.getBuffers();
    UserBitShared.RecordBatchDef batchDef = batch.getDef();

    /* DrillBuf associated with the selection vector */
    DrillBuf svBuf = null;
    Integer svCount = null;

    if (svMode == BatchSchema.SelectionVectorMode.TWO_BYTE) {
      svCount = sv2.getCount();
      svBuf = sv2.getBuffer(); //this calls retain() internally
    }

    try {
      /* Write the metadata to the file */
      batchDef.writeDelimitedTo(output);

      /* If we have a selection vector, dump it to file first */
      if (svBuf != null) {
        svBuf.getBytes(0, output, svBuf.readableBytes());
        sv2.setBuffer(svBuf);
        svBuf.release(); // sv2 now owns the buffer
        sv2.setRecordCount(svCount);
      }

      /* Dump the array of ByteBufs associated with the value vectors */
      for (DrillBuf buf : incomingBuffers) {
View Full Code Here
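
This is the serialization counterpart: the RecordBatchDef metadata is written first in delimited protobuf form, then, if the batch uses a two-byte selection vector, the selection vector's buffer (already retained by getBuffer()) is copied to the output stream, handed back to the selection vector with setBuffer(), and the extra reference is released. The loop over the value-vector buffers that follows is truncated here.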

      int len = 0;
      for (DrillBuf b : buffers) {
        len += b.capacity();
      }

      DrillBuf newBuf = buffers[0].getAllocator().buffer(len);

      /* Copy data from each buffer into the compound buffer */
      int offset = 0;
      for (DrillBuf buf : buffers) {
        newBuf.setBytes(offset, buf);
        offset += buf.capacity();
        buf.release();
      }

      List<SerializedField> fields = def.getFieldList();

      int bufferOffset = 0;

      /*
       * For each value vector slice up the appropriate size from the compound buffer and load it into the value vector
       */
      int vectorIndex = 0;

      for (VectorWrapper<?> vv : container) {
        SerializedField fmd = fields.get(vectorIndex);
        ValueVector v = vv.getValueVector();
        DrillBuf bb = newBuf.slice(bufferOffset, fmd.getBufferLength());
//        v.load(fmd, cbb.slice(bufferOffset, fmd.getBufferLength()));
        v.load(fmd, bb);
        bb.release();
        vectorIndex++;
        bufferOffset += fmd.getBufferLength();
      }
    }

View Full Code Here
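
Here several incoming DrillBufs are coalesced into one: their capacities are summed, a single compound buffer is allocated from the same allocator, and each source buffer is copied in with setBytes and then released. Every value vector is afterwards loaded from a slice() of the compound buffer, a view over the same memory rather than a copy, and that slice reference is released once load() has, presumably, taken its own reference to the data.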

  public DrillBuf getBuffer() {
    return getBuffer(true);
  }

  public DrillBuf getBuffer(boolean clear) {
    DrillBuf bufferHandle = this.buffer;

    if (clear) {
      /* Increment the ref count for this buffer */
      bufferHandle.retain();

      /* We are passing ownership of the buffer to the
       * caller. clear the buffer from within our selection vector
       */
      clear();
View Full Code Here
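
getBuffer(true) transfers ownership to the caller: the buffer's reference count is bumped with retain() and the selection vector then clears its own state, so the caller is responsible for releasing the buffer. A caller-side sketch (sv2 stands in for a selection-vector object exposing this getBuffer(), as in the serialization example above):

    DrillBuf owned = sv2.getBuffer();   // retained for us; the selection vector has cleared itself
    try {
      // ... read the 2-byte entries, hand the buffer to another consumer, etc. ...
    } finally {
      owned.release();                  // we own the reference now, so we must drop it
    }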


    // Add a couple of Operator Contexts
    // Initial allocation = 1000000 bytes for all operators
    OperatorContext oContext11 = new OperatorContext(physicalOperator1, fragmentContext1, true);
    DrillBuf b11 = oContext11.getAllocator().buffer(1000000);

    OperatorContext oContext12 = new OperatorContext(physicalOperator2, fragmentContext1, stats, true);
    DrillBuf b12 = oContext12.getAllocator().buffer(500000);

    OperatorContext oContext21 = new OperatorContext(physicalOperator3, fragmentContext2, true);

    def = new OpProfileDef(physicalOperator4.getOperatorId(), UserBitShared.CoreOperatorType.TEXT_WRITER_VALUE, OperatorContext.getChildCount(physicalOperator4));
    stats = fragmentContext2.getStats().getOperatorStats(def, fragmentContext2.getAllocator());
    OperatorContext oContext22 = new OperatorContext(physicalOperator4, fragmentContext2, stats, true);
    DrillBuf b22 = oContext22.getAllocator().buffer(2000000);

    // New Fragment begins
    BitControl.PlanFragment.Builder pfBuilder3 = BitControl.PlanFragment.newBuilder();
    pfBuilder3.setMemInitial(1000000);
    BitControl.PlanFragment pf3 = pfBuilder3.build();

    FragmentContext fragmentContext3 = new FragmentContext(bitContext, pf3, null, functionRegistry);

    // New fragment starts an operator that allocates an amount within the limit
    def = new OpProfileDef(physicalOperator5.getOperatorId(), UserBitShared.CoreOperatorType.UNION_VALUE, OperatorContext.getChildCount(physicalOperator5));
    stats = fragmentContext3.getStats().getOperatorStats(def, fragmentContext3.getAllocator());
    OperatorContext oContext31 = new OperatorContext(physicalOperator5, fragmentContext3, stats, true);

    DrillBuf b31a = oContext31.getAllocator().buffer(200000);

    //Previously running operator completes
    b22.release();
    oContext22.close();

    // Fragment 3 asks for more and fails
    boolean outOfMem = false;
    try {
      DrillBuf b31b = oContext31.getAllocator().buffer(4400000);
      if (b31b != null) {
        b31b.release();
      } else {
        outOfMem = true;
      }
    } catch (Exception e) {
      outOfMem = true;
    }
    assertEquals(true, outOfMem);

    // Operator is Exempt from Fragment limits. Fragment 3 asks for more and succeeds
    outOfMem = false;
    OperatorContext oContext32 = new OperatorContext(physicalOperator6, fragmentContext3, false);
    DrillBuf b32 = null;
    try {
      b32 = oContext32.getAllocator().buffer(4400000);
    } catch (Exception e) {
      outOfMem = true;
    } finally {
      if (b32 != null) {
        b32.release();
      } else {
        outOfMem = true;
      }
      oContext32.close();
    }
View Full Code Here
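
This test exercises per-fragment memory limits. Operator contexts created with the trailing flag set to true are subject to their fragment's limit, so the 4400000-byte request on oContext31 fails (either by returning null or by throwing) once it exceeds what fragment 3 may allocate, and the test asserts that outOfMem was set. oContext32 is created with the flag set to false, making it exempt from the fragment limit, so the same request is expected to succeed; the buffer is released and the context closed in the finally block either way.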

//        "}";
    MapVector v = new MapVector("", allocator);
    ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
    writer.allocate();

    DrillBuf buffer = allocator.buffer(255);
    JsonReaderWithState jsonReader = new JsonReaderWithState(new ReaderJSONRecordSplitter(compound), buffer,
        GroupScan.ALL_COLUMNS, false);
    int i = 0;
    List<Integer> batchSizes = Lists.newArrayList();

    outside: while (true) {
      writer.setPosition(i);
      switch (jsonReader.write(writer)) {
      case WRITE_SUCCEED:
        i++;
        break;
      case NO_MORE:
        batchSizes.add(i);
        System.out.println("no more records - main loop");
        break outside;

      case WRITE_FAILED:
        System.out.println("==== hit bounds at " + i);
        //writer.setValueCounts(i - 1);
        batchSizes.add(i);
        i = 0;
        writer.allocate();
        writer.reset();

        switch(jsonReader.write(writer)) {
        case NO_MORE:
          System.out.println("no more records - new alloc loop.");
          break outside;
        case WRITE_FAILED:
          throw new RuntimeException("Failure while trying to write.");
        case WRITE_SUCCEED:
          i++;
        }

      }
    }

    int total = 0;
    int lastRecordCount = 0;
    for (Integer records : batchSizes) {
      total += records;
      lastRecordCount = records;
    }


    ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();

    ow.writeValueAsString(v.getAccessor().getObject(0));
    ow.writeValueAsString(v.getAccessor().getObject(1));
    FieldReader reader = v.get("col", MapVector.class).getAccessor().getReader();

    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    JsonWriter jsonWriter = new JsonWriter(stream, true);

    reader.setPosition(0);
    jsonWriter.write(reader);
    reader.setPosition(1);
    jsonWriter.write(reader);
    System.out.print("Json Read: ");
    System.out.println(new String(stream.toByteArray(), Charsets.UTF_8));
//    System.out.println(compound);

    System.out.println("Total Records Written " + batchSizes);

    reader.setPosition(lastRecordCount - 2);
    assertEquals("goodbye", reader.reader("c").readText().toString());
    reader.setPosition(lastRecordCount - 1);
    assertEquals("red", reader.reader("c").readText().toString());
    assertEquals((repeatSize+1) * 2, total);

    writer.clear();
    buffer.release();
  }
View Full Code Here
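
In this JSON test the DrillBuf is working space for JsonReaderWithState while records are written into a MapVector through ComplexWriterImpl. WRITE_FAILED signals that the current batch is full, so the batch size is recorded, the writer is re-allocated and reset, and reading continues into a fresh batch; NO_MORE ends the loop. The results are then read back through a FieldReader and a JsonWriter, checked with assertions, and finally the writer is cleared and the buffer released by the caller.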

    ByteBuf[] byteBufs = writableBatch.getBuffers();
    int bytes = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      bytes += byteBufs[i].writerIndex();
    }
    DrillBuf byteBuf = allocator.buffer(bytes);
    int index = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      byteBufs[i].readBytes(byteBuf, index, byteBufs[i].writerIndex());
      index += byteBufs[i].writerIndex();
    }
    byteBuf.writerIndex(bytes);

    batchLoader.load(writableBatch.getDef(), byteBuf);
    boolean firstColumn = true;
    int recordCount = 0;
    for (VectorWrapper<?> v : batchLoader) {
View Full Code Here
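
This round trip sums the writer indexes of the batch's buffers, copies them back to back into a single DrillBuf with readBytes, sets the compound buffer's writer index to the total, and hands it together with the RecordBatchDef to the batch loader's load(). The loaded vectors can then be iterated through the loader to verify the record data.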


