Package: org.apache.hadoop.hive.serde2.columnar

Examples of org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable


  public void performSequenceFileRead(FileSystem fs, int count, Path file)
      throws IOException {
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
    ByteWritable key = new ByteWritable();
    BytesRefArrayWritable val = new BytesRefArrayWritable();
    for (int i = 0; i < count; i++)
      reader.next(key, val);
  }
View Full Code Here


  /**
   * Reads an RCFile with a projection of only column 0, optionally verifying
   * each row's first column against the deterministic random-row generator.
   *
   * NOTE(review): this excerpt is truncated by the page scrape — the method's
   * return statement (presumably {@code actualReadCount}) and the reader
   * close/brace are not visible here; confirm against the full source.
   */
  public int performRCFileReadFirstColumnTest(FileSystem fs, Path file,
      int allColumnsNumber, boolean chechCorrect) throws IOException {

    byte[][] checkBytes = null;
    BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
    if (chechCorrect) {
      // Re-seed so nextRandomRow() replays the same sequence the writer used.
      this.resetRandomGenerators();
      checkBytes = new byte[allColumnsNumber][];
    }

    int actualReadCount = 0;

    // Project only column 0 so the reader can skip all other column buffers.
    java.util.ArrayList<Integer> readCols = new java.util.ArrayList<Integer>();
    readCols.add(Integer.valueOf(0));
    ColumnProjectionUtils.setReadColumnIDs(conf, readCols);
    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);

    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();
    while (reader.next(rowID)) {
      reader.getCurrentRow(cols);
      boolean ok = true;
      if (chechCorrect) {
        // Regenerate the expected row and compare only the projected column.
        this.nextRandomRow(checkBytes, checkRow);
        ok = ok && (checkRow.get(0).equals(cols.get(0)));
      }
      if (!ok)
        throw new IllegalStateException("Compare read and write error.");
      actualReadCount++;
    }
View Full Code Here

  /**
   * Reads an RCFile with a projection of the first and last columns,
   * optionally verifying both against the deterministic random-row generator.
   *
   * NOTE(review): this excerpt is truncated by the page scrape — the method's
   * return statement (presumably {@code actualReadCount}) and the reader
   * close/brace are not visible here; confirm against the full source.
   */
  public int performRCFileReadFirstAndLastColumnTest(FileSystem fs, Path file,
      int allColumnsNumber, boolean chechCorrect) throws IOException {

    byte[][] checkBytes = null;
    BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
    if (chechCorrect) {
      // Re-seed so nextRandomRow() replays the same sequence the writer used.
      this.resetRandomGenerators();
      checkBytes = new byte[allColumnsNumber][];
    }

    int actualReadCount = 0;

    // Project columns 0 and (allColumnsNumber - 1); all others are skipped.
    java.util.ArrayList<Integer> readCols = new java.util.ArrayList<Integer>();
    readCols.add(Integer.valueOf(0));
    readCols.add(Integer.valueOf(allColumnsNumber - 1));
    ColumnProjectionUtils.setReadColumnIDs(conf, readCols);
    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);

    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();
    while (reader.next(rowID)) {
      reader.getCurrentRow(cols);
      boolean ok = true;
      if (chechCorrect) {
        // Regenerate the expected row; compare only the two projected columns.
        this.nextRandomRow(checkBytes, checkRow);
        ok = ok && (checkRow.get(0).equals(cols.get(0)));
        ok = ok
            && checkRow.get(allColumnsNumber - 1).equals(
                cols.get(allColumnsNumber - 1));
      }
      if (!ok)
        throw new IllegalStateException("Compare read and write error.");
      actualReadCount++;
    }
View Full Code Here

  /**
   * Reads an RCFile with ALL columns materialized (no projection), optionally
   * verifying rows against the deterministic random-row generator.
   *
   * NOTE(review): this excerpt is truncated by the page scrape mid-loop — the
   * per-column comparison, loop close, and return are not visible here;
   * confirm against the full source.
   */
  public int performRCFileFullyReadColumnTest(FileSystem fs, Path file,
      int allColumnsNumber, boolean chechCorrect) throws IOException {

    byte[][] checkBytes = null;
    BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
    if (chechCorrect) {
      // Re-seed so nextRandomRow() replays the same sequence the writer used.
      this.resetRandomGenerators();
      checkBytes = new byte[allColumnsNumber][];
    }

    int actualReadCount = 0;

    // Disable column projection: every column buffer will be read.
    ColumnProjectionUtils.setFullyReadColumns(conf);
    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);

    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();
    while (reader.next(rowID)) {
      reader.getCurrentRow(cols);
      boolean ok = true;
      if (chechCorrect) {
        this.nextRandomRow(checkBytes, checkRow);
View Full Code Here

      // NOTE(review): fragment from a column-fetching method; the enclosing
      // signature is not visible in this excerpt.
      if (skippedColIDs[columnID]) {
        // Column was excluded by the projection; nothing to materialize.
        return null;
      }

      // Lazily allocate the output holder on first use.
      if (rest == null) {
        rest = new BytesRefArrayWritable();
      }

      // Size the output to one entry per record in the value buffer.
      rest.resetValid(recordsNumInValBuffer);

      if (!currentValue.inited)
View Full Code Here

                null, new DefaultCodec());

        // NOTE(review): fragment of a data-generation routine, truncated at
        // both ends by the page scrape; the writer construction above and the
        // loop close below are not fully visible.

        // Mirror every generated row into a plain-text (tab-separated) file
        // alongside the RCFile output.
        PrintWriter pw = new PrintWriter(new FileWriter(plainOutput));

        for (int j = 0; j < numRows; j++) {
            BytesRefArrayWritable row = new BytesRefArrayWritable(numFields);

            byte[][] fields = null;

            // Each format produces a fixed set of UTF-8 encoded fields.
            if (format.equals("student")) {
                byte[][] f = {
                        randomName().getBytes("UTF-8"),
                        Integer.valueOf(randomAge()).toString().getBytes("UTF-8"),
                        Double.valueOf(randomGpa()).toString().getBytes("UTF-8")
                };
                fields = f;
            } else if (format.equals("voter")) {
                byte[][] f = {
                        randomName().getBytes("UTF-8"),
                        Integer.valueOf(randomAge()).toString().getBytes("UTF-8"),
                        randomRegistration().getBytes("UTF-8"),
                        Double.valueOf(randomContribution()).toString().getBytes("UTF-8")
                };
                fields = f;
            } else if (format.equals("alltypes")) {
                // One value per primitive-ish type, plus a map and an array.
                byte[][] f = {
                        Integer.valueOf(rand.nextInt(Byte.MAX_VALUE)).toString().getBytes("UTF-8"),
                        Integer.valueOf(rand.nextInt(Short.MAX_VALUE)).toString().getBytes("UTF-8"),
                        Integer.valueOf(rand.nextInt()).toString().getBytes("UTF-8"),
                        Long.valueOf(rand.nextLong()).toString().getBytes("UTF-8"),
                        Float.valueOf(rand.nextFloat() * 1000).toString().getBytes("UTF-8"),
                        Double.valueOf(rand.nextDouble() * 1000000).toString().getBytes("UTF-8"),
                        randomName().getBytes("UTF-8"),
                        randomMap(),
                        randomArray()
                };
                fields = f;
            }

            // NOTE(review): an unrecognized format leaves fields == null and
            // this loop throws NPE — presumably formats are validated upstream.
            for (int i = 0; i < fields.length; i++) {
                BytesRefWritable field = new BytesRefWritable(fields[i], 0,
                        fields[i].length);
                row.set(i, field);
                // Tab-separate fields; newline terminates the row.
                pw.print(new String(fields[i]));
                if (i != fields.length - 1)
                    pw.print("\t");
                else
                    pw.println();
View Full Code Here

      // NOTE(review): interior of an RCFile writer append method; the
      // enclosing signature is not visible in this excerpt.
      if (!(val instanceof BytesRefArrayWritable)) {
        throw new UnsupportedOperationException(
            "Currently the writer can only accept BytesRefArrayWritable");
      }

      // Append each provided column value to its per-column buffer and track
      // the uncompressed (plain) byte counts.
      BytesRefArrayWritable columns = (BytesRefArrayWritable) val;
      int size = columns.size();
      for (int i = 0; i < size; i++) {
        BytesRefWritable cu = columns.get(i);
        int plainLen = cu.getLength();
        columnBufferSize += plainLen;
        columnValuePlainLength[i] += plainLen;
        columnBuffers[i].append(cu);
      }

      // Pad any trailing columns the row did not supply with empty values so
      // every column buffer stays aligned on row count.
      if (size < columnNumber) {
        for (int i = columns.size(); i < columnNumber; i++) {
          columnBuffers[i].append(BytesRefWritable.ZeroBytesRefWritable);
        }
      }

      bufferedRecords++;
View Full Code Here

      // NOTE(review): fragment from a column-fetching method (selected-column
      // variant); the enclosing signature is not visible in this excerpt.
      if (selColIdx == -1) {
        // Column is not among the selected/projected columns.
        return null;
      }

      // Lazily allocate the output holder on first use.
      if (rest == null) {
        rest = new BytesRefArrayWritable();
      }

      // Size the output to one entry per record in the value buffer.
      rest.resetValid(recordsNumInValBuffer);

      if (!currentValue.inited) {
View Full Code Here

        // NOTE(review): fragment of an RCFile write test; the enclosing method
        // and the surrounding setup are not visible in this excerpt.

        // Configure the writer: column count matches the test data, and the
        // record interval controls how often sync markers / row groups occur.
        RCFileOutputFormat.setColumnNumber(cloneConf, bytesArray.length);
        cloneConf.setInt(RCFile.RECORD_INTERVAL_CONF_STR, intervalRecordCount);

        RCFile.Writer writer = new RCFile.Writer(fs, cloneConf, testFile, null, codec);

        // Build a single row whose columns wrap the test byte arrays.
        BytesRefArrayWritable bytes = new BytesRefArrayWritable(bytesArray.length);
        for (int i = 0; i < bytesArray.length; i++) {
            BytesRefWritable cu = null;
            cu = new BytesRefWritable(bytesArray[i], 0, bytesArray[i].length);
            bytes.set(i, cu);
        }
        // Append the same row repeatedly to reach the desired record count.
        for (int i = 0; i < writeCount; i++) {
            writer.append(bytes);
        }
        writer.close();
View Full Code Here

        // NOTE(review): tail of a record-reader constructor/initializer; the
        // signature and earlier statements are not visible in this excerpt.
        this.start = in.getPosition();
        // There is more to read only if the current position is before the
        // split's end offset.
        more = start < end;

        key = new LongWritable();
        value = new BytesRefArrayWritable();
    }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.