Examples of BytesRefWritable
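
org.apache.hadoop.hive.serde2.columnar.BytesRefWritable wraps a reference to a byte range: a backing byte[] plus a start offset and a length. RCFile's reader and writer use it to hand column values around without copying, and a whole row is simply a BytesRefArrayWritable of such cells. Before the excerpts, here is a minimal self-contained sketch of the core API; it uses only the constructors and accessors that appear in the snippets below plus the getData()/getStart() getters, and the class name and data are illustrative only:

    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;

    public class BytesRefWritableSketch {
      public static void main(String[] args) throws Exception {
        // One "row" with three columns. BytesRefWritable(byte[], offset, length)
        // stores a reference, not a copy, so the arrays must outlive the row.
        byte[][] fields = { "123".getBytes(), "hive".getBytes(), "4.5".getBytes() };

        BytesRefArrayWritable row = new BytesRefArrayWritable(fields.length);
        for (int i = 0; i < fields.length; i++) {
          row.set(i, new BytesRefWritable(fields[i], 0, fields[i].length));
        }

        for (int i = 0; i < row.size(); i++) {
          BytesRefWritable cell = row.get(i);
          // getData() declares IOException because inside RCFile it may trigger
          // lazy decompression; here it simply returns the wrapped array.
          String value = new String(cell.getData(), cell.getStart(), cell.getLength());
          System.out.println("column " + i + " = " + value);
        }
      }
    }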


Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      // Fill each projected column of the returned row from the current row group's value buffers

      for (int j = 0; j < prjColIDs.length; ++j) {
        int i = prjColIDs[j];

        BytesRefWritable ref = ret.unCheckedGet(i);

        int columnCurrentRowStart = (int) columnRowReadIndex[i];
        int length = getColumnNextValueLength(i);
        columnRowReadIndex[i] = columnCurrentRowStart + length;

        if (currentValue.decompressedFlag[j])
          // Column already decompressed: point the ref straight at the plain bytes
          ref.set(currentValue.loadedColumnsValueBuffer[j].getData(), columnCurrentRowStart, length);
        else
          // Still compressed: store a callback so decompression happens lazily on first access
          ref.set(currentValue.lazyDecompressCallbackObjs[j], columnCurrentRowStart, length);
      }
      rowFetched = true;
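
The two ref.set(...) overloads above are the heart of RCFile's lazy decompression: when decompressedFlag[j] is set, the ref points straight at the already-inflated bytes of the column's value buffer; otherwise it stores the column's LazyDecompressionCallback, and the bytes are inflated only if a consumer later calls getData() on the cell.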

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

            for (int i = 0; i < fields.length; i++) {
                // Wrap field i's bytes (no copy) and store it as column i of the row
                BytesRefWritable field = new BytesRefWritable(fields[i], 0,
                        fields[i].length);
                row.set(i, field);
                pw.print(new String(fields[i]));
                if (i != fields.length - 1)
                    pw.print("\t");
            }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      BytesRefArrayWritable columns = (BytesRefArrayWritable) val;
      int size = columns.size();
      for (int i = 0; i < size; i++) {
        BytesRefWritable cu = columns.get(i);
        int plainLen = cu.getLength();
        // Track uncompressed sizes and append the cell to its per-column buffer
        columnBufferSize += plainLen;
        columnValuePlainLength[i] += plainLen;
        columnBuffers[i].append(cu);
      }
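
This is the writer-side transpose: each incoming row's i-th field is appended to the i-th per-column buffer, while columnBufferSize and columnValuePlainLength accumulate the uncompressed byte counts the writer consults when deciding to flush the buffered rows out as a row group.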

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      for (int i = 0; i < recordsNumInValBuffer; i++) {
        // Advance to row i of the selected column and fetch its byte length
        colAdvanceRow(selColIdx, selCol);
        int length = selCol.prvLength;

        BytesRefWritable currentCell = rest.get(i);

        if (decompressed) {
          currentCell.set(uncompData, columnNextRowStart, length);
        } else {
          currentCell.set(decompCallBack, columnNextRowStart, length);
        }
        columnNextRowStart = columnNextRowStart + length;
      }
      return rest;
    }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      // Mixed case: at least one selected column is still compressed
      if (currentValue.numCompressed > 0) {
        for (int j = 0; j < selectedColumns.length; ++j) {
          SelectedColumn col = selectedColumns[j];
          int i = col.colIndex;

          BytesRefWritable ref = ret.unCheckedGet(i);

          colAdvanceRow(j, col);

          if (currentValue.decompressedFlag[j]) {
            ref.set(currentValue.loadedColumnsValueBuffer[j].getData(),
                col.rowReadIndex, col.prvLength);
          } else {
            ref.set(currentValue.lazyDecompressCallbackObjs[j],
                col.rowReadIndex, col.prvLength);
          }
          col.rowReadIndex += col.prvLength;
        }
      } else {
        // This version of the loop eliminates a condition check and branch
        // and is measurably faster (20% or so)
        for (int j = 0; j < selectedColumns.length; ++j) {
          SelectedColumn col = selectedColumns[j];
          int i = col.colIndex;

          BytesRefWritable ref = ret.unCheckedGet(i);

          colAdvanceRow(j, col);
          ref.set(currentValue.loadedColumnsValueBuffer[j].getData(),
                col.rowReadIndex, col.prvLength);
          col.rowReadIndex += col.prvLength;
        }
      }
      rowFetched = true;

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

        RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

        BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
        for (int i = 0; i < bytesArray.length; i++) {
            BytesRefWritable cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
            bytes.set(i, cu);
        }
        for (int i = 0; i < writeCount; i++) {
            writer.append(bytes);
        }
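
The snippet above only writes; a matching read-back, sketched as a continuation that reuses fs, cloneConf, and testFile from the snippet (and assumes writer.close() has been called), looks roughly like this using RCFile.Reader:

        // Hedged sketch: iterate rows and materialize each cell's bytes.
        RCFile.Reader reader = new RCFile.Reader(fs, testFile, cloneConf);
        org.apache.hadoop.io.LongWritable rowID = new org.apache.hadoop.io.LongWritable();
        BytesRefArrayWritable cols = new BytesRefArrayWritable();
        while (reader.next(rowID)) {
            reader.getCurrentRow(cols);             // fill cols with this row's cells
            for (int i = 0; i < cols.size(); i++) {
                byte[] cellBytes = cols.get(i).getBytesCopy();  // forces decompression
            }
        }
        reader.close();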

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      // Data
      BytesRefArrayWritable braw = new BytesRefArrayWritable(8);
      String[] data = {"123", "456", "789", "1000", "5.3", "hive and hadoop", "1.", "NULL"};
      for (int i = 0; i < 8; i++) {
        braw.set(i, new BytesRefWritable(data[i].getBytes()));
      }
      // Test
      deserializeAndSerializeColumnar(serDe, braw, data);
      System.out.println("test: testColumnarSerde - OK");
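
For context, the serDe used above is typically a ColumnarSerDe initialized from table properties before any (de)serialization. A hedged fragment (the usual Hive/Hadoop imports assumed; the column names and types are made up to fit the eight test values):

      Properties tbl = new Properties();
      tbl.setProperty("columns", "c1,c2,c3,c4,c5,c6,c7,c8");  // hypothetical schema
      tbl.setProperty("columns.types", "int,int,int,bigint,double,string,double,string");
      ColumnarSerDe serDe = new ColumnarSerDe();
      serDe.initialize(new Configuration(), tbl);
      Object row = serDe.deserialize(braw);   // lazy columnar row object
      ObjectInspector oi = serDe.getObjectInspector();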

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    bytes.clear();
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    writer.close();
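
Note the bytes.clear() between the two appends: it resets the array's valid-cell count so the same BytesRefArrayWritable can be refilled for record_2 instead of allocating a fresh row.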

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(fieldsData[i], 0, fieldsData[i].length);
      bytes.set(i, cu);
    }

    for (int i = 0; i < count; i++) {
      writer.append(bytes);
    }

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    resetRandomGenerators();

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    // zero length key is not allowed by block compress writer, so we use a byte
    // writable
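
The wrapped comment refers to the benchmark's comparison writer: a block-compressed SequenceFile writer rejects zero-length keys, so each BytesRefArrayWritable row is presumably written under a dummy one-byte ByteWritable key rather than an empty one.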