Examples of RCFileKeyBufferWrapper
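RCFileKeyBufferWrapper packages an RCFile KeyBuffer together with per-block metadata: the compression codec, the input path, and the key, compressed-key, and record lengths. Hive's block-merge and partial-scan tasks use it to move whole RCFile blocks through MapReduce as keys, as the examples below show.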


Examples of org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper
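From Hive's RCFileMergeMapper, the map() implementation behind block-level RCFile concatenation (ALTER TABLE ... CONCATENATE). The incoming key may arrive wrapped in a CombineHiveKey; once unwrapped, the mapper lazily opens an RCFile.Writer, nulls out dropped columns, recomputes the record length, and flushes the still-compressed block straight to the output file.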

  public void map(Object k, RCFileValueBufferWrapper value,
      OutputCollector<Object, Object> output, Reporter reporter)
      throws IOException {
    try {

      // CombineHiveInputFormat wraps the key in a CombineHiveKey; unwrap it
      // to get at the underlying RCFileKeyBufferWrapper.
      RCFileKeyBufferWrapper key = null;
      if (k instanceof CombineHiveKey) {
        key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();
      } else {
        key = (RCFileKeyBufferWrapper) k;
      }

      // For list-bucketed tables the temporary output path must be fixed up
      // to reflect the skewed-value subdirectories; do this exactly once.
      if (work.getListBucketingCtx().calculateListBucketingLevel() > 0) {
        if (!this.tmpPathFixedConcatenate) {
          fixTmpPathConcatenate(key.getInputPath().getParent(),
              work.getListBucketingCtx().calculateListBucketingLevel());
          tmpPathFixedConcatenate = true;
        }
      }

      // Lazily open the output writer, taking the codec and column count
      // from the first block seen.
      if (outWriter == null) {
        codec = key.getCodec();
        columnNumber = key.getKeyBuffer().getColumnNumber();
        jc.setInt(RCFile.COLUMN_NUMBER_CONF_STR, columnNumber);
        outWriter = new RCFile.Writer(fs, jc, outPath, null, codec);
      }

      // Null out columns that have been dropped from the table so they are
      // not carried into the merged file.
      for (Integer i : work.getDroppedColumns()) {
        key.getKeyBuffer().nullColumn(i);
        value.getValueBuffer().nullColumn(i);
      }

      // A record block is the key section followed by each column's value
      // section, so its length is the key size plus the per-column value
      // lengths.
      int keyLength = key.getKeyBuffer().getSize();
      int recordLength = key.getKeyBuffer().getSize();
      for (int columnLen : key.getKeyBuffer().getEachColumnValueLen()) {
        recordLength += columnLen;
      }

      // Copy the whole block to the output without decompressing it.
      outWriter.flushBlock(key.getKeyBuffer(), value.getValueBuffer(), recordLength,
          keyLength, key.getCompressedKeyLength());
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
  }

Examples of org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper
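The same merge mapper as drawn from another Hive version. The logic is identical; the only difference is that the column count is configured through RCFileOutputFormat.setColumnNumber() instead of setting RCFile.COLUMN_NUMBER_CONF_STR on the JobConf directly.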

  public void map(Object k, RCFileValueBufferWrapper value,
      OutputCollector<Object, Object> output, Reporter reporter)
      throws IOException {
    try {

      RCFileKeyBufferWrapper key = null;
      if (k instanceof CombineHiveKey) {
        key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();
      } else {
        key = (RCFileKeyBufferWrapper) k;
      }

      if (work.getListBucketingCtx().calculateListBucketingLevel() > 0) {
        if (!this.tmpPathFixedConcatenate) {
          fixTmpPathConcatenate(key.getInputPath().getParent(),
              work.getListBucketingCtx().calculateListBucketingLevel());
          tmpPathFixedConcatenate = true;
        }
      }

      if (outWriter == null) {
        codec = key.getCodec();
        columnNumber = key.getKeyBuffer().getColumnNumber();
        RCFileOutputFormat.setColumnNumber(jc, columnNumber);
        outWriter = new RCFile.Writer(fs, jc, outPath, null, codec);
      }

      for (Integer i : work.getDroppedColumns()) {
        key.getKeyBuffer().nullColumn(i);
        value.getValueBuffer().nullColumn(i);
      }

      int keyLength = key.getKeyBuffer().getSize();
      int recordLength = key.getKeyBuffer().getSize();
      for (int columnLen : key.getKeyBuffer().getEachColumnValueLen()) {
        recordLength += columnLen;
      }

      outWriter.flushBlock(key.getKeyBuffer(), value.getValueBuffer(), recordLength,
          keyLength, key.getCompressedKeyLength());
    } catch (Throwable e) {
      this.exception = true;
      close();
      throw new IOException(e);
    }
  }

Examples of org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper
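An operator-style variant of the same merge (processKeyValuePairs() rather than a MapReduce map(), throwing HiveException and closing via closeOp()). In addition to unwrapping the key and lazily creating the writer, it verifies that every input block matches the writer's codec and column count; a mismatch aborts the merge.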

  private void processKeyValuePairs(Object k, Object v)
      throws HiveException {
    try {

      RCFileKeyBufferWrapper key;
      if (k instanceof CombineHiveKey) {
        key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();
      } else {
        key = (RCFileKeyBufferWrapper) k;
      }
      RCFileValueBufferWrapper value = (RCFileValueBufferWrapper) v;

      fixTmpPath(key.getInputPath().getParent());

      if (outWriter == null) {
        codec = key.getCodec();
        columnNumber = key.getKeyBuffer().getColumnNumber();
        RCFileOutputFormat.setColumnNumber(jc, columnNumber);
        outWriter = new RCFile.Writer(fs, jc, outPath, null, codec);
      }

      // Blocks are copied verbatim, so every input file must match the
      // writer's compression codec and column count.
      boolean sameCodec = ((codec == key.getCodec()) || codec.getClass().equals(
          key.getCodec().getClass()));

      if ((key.getKeyBuffer().getColumnNumber() != columnNumber) ||
          (!sameCodec)) {
        throw new IOException("RCFileMerge failed because the input files" +
            " use different CompressionCodec or have different column number" +
            " setting.");
      }

      outWriter.flushBlock(key.getKeyBuffer(), value.getValueBuffer(),
          key.getRecordLength(), key.getKeyLength(),
          key.getCompressedKeyLength());
    } catch (Throwable e) {
      this.exception = true;
      closeOp(true);
      throw new HiveException(e);
    }
  }

Examples of org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileKeyBufferWrapper
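From Hive's PartialScanMapper, which backs ANALYZE TABLE ... PARTIALSCAN: the KeyBuffer inside the wrapper exposes each column's uncompressed value length, letting the mapper derive raw-data-size statistics without decompressing or scanning rows.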

  public void map(Object k, RCFileValueBufferWrapper value,
      OutputCollector<Object, Object> output, Reporter reporter)
      throws IOException {
    try {
      //CombineHiveInputFormat is set in PartialScanTask.
      RCFileKeyBufferWrapper key = (RCFileKeyBufferWrapper) ((CombineHiveKey) k).getKey();

      // calculate rawdatasize
      KeyBuffer keyBuffer = key.getKeyBuffer();
      long[] uncompressedColumnSizes = new long[keyBuffer.getColumnNumber()];
      for (int i = 0; i < keyBuffer.getColumnNumber(); i++) {
        uncompressedColumnSizes[i] += keyBuffer.getEachColumnUncompressedValueLen()[i];
      }
      if (uncompressedColumnSizes != null) {
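The listing is truncated at this point. As a minimal sketch of how the rollup might continue, assuming the per-column figures are summed into a single statistic (the helper below is illustrative, not Hive's actual continuation):

  // Hypothetical helper, not part of Hive: roll the per-column uncompressed
  // value lengths gathered above into one raw-data-size total for the block.
  private static long rawDataSize(long[] uncompressedColumnSizes) {
    long total = 0L;
    for (long columnSize : uncompressedColumnSizes) {
      total += columnSize;
    }
    return total;
  }

Accumulated across all the blocks a mapper sees, such a total approximates the table's raw data size without ever touching row data.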
