Examples of BytesRefWritable


Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    byte[][] columnRandom;

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(columnNum);
    columnRandom = new byte[columnNum][];
    for (int i = 0; i < columnNum; i++) {
      BytesRefWritable cu = new BytesRefWritable();
      bytes.set(i, cu);
    }

    for (int i = 0; i < rowCount; i++) {
      nextRandomRow(columnRandom, bytes);
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = null;
      cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
      bytes.set(i, cu);
    }
    for (int i = 0; i < writeCount; i++) {
      writer.append(bytes);
    }
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
    for (int i = 0; i < bytesArray.length; i++) {
      BytesRefWritable cu = null;
      cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
      bytes.set(i, cu);
    }
    for (int i = 0; i < writeCount; i++) {
      writer.append(bytes);
    }
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      }
      for (int i = 0; i < recordsNumInValBuffer; i++) {
        colAdvanceRow(selColIdx, selCol);
        int length = selCol.prvLength;

        BytesRefWritable currentCell = rest.get(i);

        if (decompressed) {
          currentCell.set(uncompData, columnNextRowStart, length);
        } else {
          currentCell.set(decompCallBack, columnNextRowStart, length);
        }
        columnNextRowStart = columnNextRowStart + length;
      }
      return rest;
    }
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

          int i = col.colIndex;

          if (col.isNulled) {
            ret.set(i, null);
          } else {
            BytesRefWritable ref = ret.unCheckedGet(i);

            colAdvanceRow(j, col);

            if (currentValue.decompressedFlag[j]) {
              ref.set(currentValue.loadedColumnsValueBuffer[j].getData(),
                  col.rowReadIndex, col.prvLength);
            } else {
              ref.set(currentValue.lazyDecompressCallbackObjs[j],
                  col.rowReadIndex, col.prvLength);
            }
            col.rowReadIndex += col.prvLength;
          }
        }
      } else {
        // This version of the loop eliminates a condition check and branch
        // and is measurably faster (20% or so)
        for (int j = 0; j < selectedColumns.length; ++j) {
          SelectedColumn col = selectedColumns[j];
          int i = col.colIndex;

          if (col.isNulled) {
            ret.set(i, null);
          } else {
            BytesRefWritable ref = ret.unCheckedGet(i);

            colAdvanceRow(j, col);
            ref.set(currentValue.loadedColumnsValueBuffer[j].getData(),
                  col.rowReadIndex, col.prvLength);
            col.rowReadIndex += col.prvLength;
          }
        }
      }
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

      }

      BytesRefArrayWritable columns = (BytesRefArrayWritable) val;
      int size = columns.size();
      for (int i = 0; i < size; i++) {
        BytesRefWritable cu = columns.get(i);
        int plainLen = cu.getLength();
        columnBufferSize += plainLen;
        columnValuePlainLength[i] += plainLen;
        columnBuffers[i].append(cu);
      }
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

                                              new Text("cat"),
                                              new Text("dog")),
                        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    bytes.clear();
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    writer.close();
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

        new byte[0],
        "NULL".getBytes("UTF-8")};
   
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    bytes.clear();
    for (int i = 0; i < record_2.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_2[i], 0,
          record_2[i].length);
      bytes.set(i, cu);
    }
    writer.append(bytes);
    writer.close();

    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
   
    LongWritable rowID = new LongWritable();
    assertTrue(reader.next(rowID));
    assertEquals(rowID.get(), 0L);
   
    assertTrue(reader.next(rowID));
    assertEquals(rowID.get(), 1L);
   
    BytesRefArrayWritable result = null;
    BytesRefWritable brw;
    for (int col=0; col < 8; col++) {
      BytesRefArrayWritable result2 = reader.getColumn(col, result);
      if (result == null) {
        assertNotNull(result2);
        result = result2;
      } else {
        // #getColumn(2) should return the instance passed in:
        assertSame(result2, result);
      }
      // each column has height of 2:
      assertEquals(2, result.size());
      for (int row=0; row<result.size(); row++) {
        brw = result.get(row);
        int start = brw.getStart();
        int len = brw.getLength();
        byte[] actualData = Arrays.copyOfRange(brw.getData(), start, start + len);
        byte[] expectedData = (row == 0) ? record_1[col] : record_2[col];
        assertArrayEquals("col="+col+" : row="+row,  expectedData, actualData);
      }
     
      result.clear();
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    for (int recIdx = 0; recIdx < recCount; recIdx++) {
      for (int i = 0; i < record.length; i++) {
        record[i] = new Integer(rand.nextInt()).toString().getBytes("UTF-8");
      }
      for (int i = 0; i < record.length; i++) {
        BytesRefWritable cu = new BytesRefWritable(record[i], 0,
            record[i].length);
        bytes.set(i, cu);
      }
      writer.append(bytes);
      bytes.clear();
View Full Code Here

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefWritable

    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = null;
      cu = new BytesRefWritable(fieldsData[i], 0, fieldsData[i].length);
      bytes.set(i, cu);
    }

    for (int i = 0; i < count; i++) {
      writer.append(bytes);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.