Examples of BytesRefWritable

BytesRefWritable wraps a reference to a byte range (a backing array, a start offset, and a length) and serves as a single column cell inside a BytesRefArrayWritable row when reading and writing Hive RCFiles.

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      // Rows must arrive as BytesRefArrayWritable; anything else is rejected.
      if (!(val instanceof BytesRefArrayWritable)) {
        throw new UnsupportedOperationException(
            "Currently the writer can only accept BytesRefArrayWritable");
      }

      BytesRefArrayWritable columns = (BytesRefArrayWritable) val;
      int size = columns.size();
      for (int i = 0; i < size; i++) {
        BytesRefWritable cu = columns.get(i);
        int plainLen = cu.getLength();
        columnBufferSize += plainLen;
        columnValuePlainLength[i] += plainLen;
        columnBuffers[i].append(cu);
      }
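
This excerpt is the column-append loop inside Hive's RCFile.Writer.append(): each BytesRefWritable cell is appended to its column buffer while the plain (uncompressed) lengths are tallied. For orientation, the following is a minimal, self-contained sketch of writing one row through the public API; the output path and cell values are invented for illustration, and a local filesystem is assumed.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFile;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;

public class RCFileWriteSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);   // assumption: local FS
    Path file = new Path("/tmp/example.rc");     // invented path

    byte[][] row = { "hello".getBytes("UTF-8"), "world".getBytes("UTF-8") };

    // The writer must know the column count before it is constructed.
    RCFileOutputFormat.setColumnNumber(conf, row.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file);

    // One BytesRefWritable per column cell, each referencing its raw bytes.
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(row.length);
    for (int i = 0; i < row.length; i++) {
      bytes.set(i, new BytesRefWritable(row[i], 0, row[i].length));
    }
    writer.append(bytes);
    writer.close();
  }
}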

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

      BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
      for (int i = 0; i < bytesArray.length; i++) {
        BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
        bytes.set(i, cu);
      }
      for (int i = 0; i < writeCount; i++) {
        writer.append(bytes);
      }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    byte[][] byteArray = buildBytesArray();

    BytesRefArrayWritable bytesWritable = new BytesRefArrayWritable(byteArray.length);
    for (int i = 0; i < byteArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(byteArray[i], 0, byteArray[i].length);
      bytesWritable.set(i, cu);
    }

    // Convert the byte array to an HCatRecord using isd, convert the HCatRecord
    // back to a byte array using osd, and compare the two arrays.

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    bytes.clear();
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    writer.close();
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(fieldsData[i], 0, fieldsData[i].length);
      bytes.set(i, cu);
    }

    for (int i = 0; i < count; i++) {
      writer.append(bytes);
    }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
      bytes.set(i, cu);
    }
    for (int i = 0; i < writeCount; i++) {
      if (i == intervalRecordCount) {
        System.out.println("write position:" + writer.getLength());
      }
      writer.append(bytes);
    }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    resetRandomGenerators();

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    // A zero-length key is not allowed by the block-compress writer, so we use
    // a byte writable instead.

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    byte[][] columnRandom;

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    for (int i = 0; i < rowCount; i++) {
      nextRandomRow(columnRandom, bytes);
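
In the two benchmark excerpts above, empty BytesRefWritable() cells are created once and then repointed at fresh data for every row; in the original test code that is done by the nextRandomRow helper. Below is a small sketch of this reuse pattern, with the random fill replaced by a trivial deterministic one purely for illustration.

import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;

public class CellReuseSketch {
  public static void main(String[] args) throws Exception {
    int columnNum = 3;
    int rowCount = 5;

    // Create the placeholder cells once.
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    for (int i = 0; i < columnNum; i++) {
      bytes.set(i, new BytesRefWritable());
    }

    byte[][] columnRandom = new byte[columnNum][];
    for (int row = 0; row < rowCount; row++) {
      for (int i = 0; i < columnNum; i++) {
        columnRandom[i] = ("r" + row + "c" + i).getBytes("UTF-8");
        // Repoint the existing cell; no new BytesRefWritable per row.
        bytes.get(i).set(columnRandom[i], 0, columnRandom[i].length);
      }
      // writer.append(bytes);  // each repointed row would be appended here
    }
  }
}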

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      fetchColumnTempBuf.reset(currentKey.allCellValLenBuffer[columnID]
          .getData(), currentKey.allCellValLenBuffer[columnID].getLength());
      for (int i = 0; i < recordsNumInValBuffer; i++) {
        int length = getColumnNextValueLength(columnID);

        BytesRefWritable currentCell = rest.get(i);
        if (currentValue.decompressedFlag[columnID]) {
          currentCell.set(currentValue.loadedColumnsValueBuffer[columnID].getData(),
              columnNextRowStart, length);
        } else {
          currentCell.set(currentValue.lazyDecompressCallbackObjs[columnID],
              columnNextRowStart, length);
        }
        columnNextRowStart = columnNextRowStart + length;
      }
      return rest;
    }
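
This final excerpt is from the reader-side internals: each returned cell is pointed either at already-decompressed column bytes or at a lazy-decompression callback, so column data is only materialized when a cell is actually read. To close the loop, here is a minimal, self-contained sketch of consuming rows through the public RCFile.Reader API; the input path is invented and the file is assumed to have been written as in the writer sketch near the top of this page.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.RCFile;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
import org.apache.hadoop.io.LongWritable;

public class RCFileReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);   // assumption: local FS
    Path file = new Path("/tmp/example.rc");     // invented path

    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();

    while (reader.next(rowID)) {
      reader.getCurrentRow(cols);                // columns stay lazily decompressed
      for (int i = 0; i < cols.size(); i++) {
        BytesRefWritable cell = cols.get(i);
        // getData() materializes the bytes; copy out the referenced range.
        String value = new String(cell.getData(), cell.getStart(), cell.getLength(), "UTF-8");
        System.out.println("column " + i + ": " + value);
      }
    }
    reader.close();
  }
}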