Package: org.apache.hadoop.hbase.io.encoding

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder


        new DoubleOutputStream(dos, baos);
    writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS);
    ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
    rawBuf.rewind();

    DataBlockEncoder encoder = encoding.getEncoder();
    int headerLen = dummyHeader.length;
    byte[] encodedResultWithHeader = null;
    if (encoder != null) {
      HFileBlockEncodingContext encodingCtx =
          encoder.newDataBlockEncodingContext(algo, encoding, dummyHeader);
      encoder.encodeKeyValues(rawBuf, includesMemstoreTS,
          encodingCtx);
      encodedResultWithHeader =
          encodingCtx.getUncompressedBytesWithHeader();
    } else {
      HFileBlockDefaultEncodingContext defaultEncodingCtx =
View Full Code Here


      short dataBlockEncoderId = block.getDataBlockEncodingId();
      if (dataBlockEncoder == null ||
          !DataBlockEncoding.isCorrectEncoder(dataBlockEncoder,
              dataBlockEncoderId)) {
        DataBlockEncoder encoder =
            DataBlockEncoding.getDataBlockEncoderById(dataBlockEncoderId);
        setDataBlockEncoder(encoder);
      }

      seeker.setCurrentBuffer(getEncodedBuffer(newBlock));
View Full Code Here

   * @param encodeCtx where will the output data be stored
   */
  private void encodeBufferToHFileBlockBuffer(ByteBuffer in,
      DataBlockEncoding algo, boolean includesMemstoreTS,
      HFileBlockEncodingContext encodeCtx) {
    DataBlockEncoder encoder = algo.getEncoder();
    try {
      encoder.encodeKeyValues(in, includesMemstoreTS, encodeCtx);
    } catch (IOException e) {
      throw new RuntimeException(String.format(
          "Bug in data block encoder "
              + "'%s', it probably requested too much data, " +
              "exception message: %s.",
View Full Code Here

  @Override
  public HFileBlockEncodingContext newOnDiskDataBlockEncodingContext(
      Algorithm compressionAlgorithm,  byte[] dummyHeader) {
    if (onDisk != null) {
      DataBlockEncoder encoder = onDisk.getEncoder();
      if (encoder != null) {
        return encoder.newDataBlockEncodingContext(
            compressionAlgorithm, onDisk, dummyHeader);
      }
    }
    return new HFileBlockDefaultEncodingContext(compressionAlgorithm,
        null, dummyHeader);
View Full Code Here

  @Override
  public HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(
      Algorithm compressionAlgorithm) {
    if (onDisk != null) {
      DataBlockEncoder encoder = onDisk.getEncoder();
      if (encoder != null) {
        return encoder.newDataBlockDecodingContext(
            compressionAlgorithm);
      }
    }
    return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
  }
View Full Code Here

    for (DataBlockEncoding encoding : encodings) {
      if (encoding == DataBlockEncoding.NONE) {
        continue;
      }
      DataBlockEncoder d = encoding.getEncoder();
      codecs.add(new EncodedDataBlock(d, includesMemstoreTS, encoding, rawKVs));
    }
  }
View Full Code Here

        new DoubleOutputStream(dos, baos);
    writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS);
    ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
    rawBuf.rewind();

    DataBlockEncoder encoder = encoding.getEncoder();
    int headerLen = dummyHeader.length;
    byte[] encodedResultWithHeader = null;
    if (encoder != null) {
      HFileBlockEncodingContext encodingCtx =
          encoder.newDataBlockEncodingContext(algo, encoding, dummyHeader);
      encoder.encodeKeyValues(rawBuf, includesMemstoreTS,
          encodingCtx);
      encodedResultWithHeader =
          encodingCtx.getUncompressedBytesWithHeader();
    } else {
      HFileBlockDefaultEncodingContext defaultEncodingCtx =
View Full Code Here

        new DoubleOutputStream(dos, baos);
    writeTestKeyValues(doubleOutputStream, blockId, includesMemstoreTS);
    ByteBuffer rawBuf = ByteBuffer.wrap(baos.toByteArray());
    rawBuf.rewind();

    DataBlockEncoder encoder = encoding.getEncoder();
    int headerLen = dummyHeader.length;
    byte[] encodedResultWithHeader = null;
    if (encoder != null) {
      HFileBlockEncodingContext encodingCtx =
          encoder.newDataBlockEncodingContext(algo, encoding, dummyHeader);
      encoder.encodeKeyValues(rawBuf, includesMemstoreTS,
          encodingCtx);
      encodedResultWithHeader =
          encodingCtx.getUncompressedBytesWithHeader();
    } else {
      HFileBlockDefaultEncodingContext defaultEncodingCtx =
View Full Code Here

   * @param encodeCtx where will the output data be stored
   */
  private void encodeBufferToHFileBlockBuffer(ByteBuffer in,
      DataBlockEncoding algo, boolean includesMemstoreTS,
      HFileBlockEncodingContext encodeCtx) {
    DataBlockEncoder encoder = algo.getEncoder();
    try {
      encoder.encodeKeyValues(in, includesMemstoreTS, encodeCtx);
    } catch (IOException e) {
      throw new RuntimeException(String.format(
          "Bug in data block encoder "
              + "'%s', it probably requested too much data, " +
              "exception message: %s.",
View Full Code Here

  @Override
  public HFileBlockEncodingContext newOnDiskDataBlockEncodingContext(
      Algorithm compressionAlgorithm,  byte[] dummyHeader) {
    if (onDisk != null) {
      DataBlockEncoder encoder = onDisk.getEncoder();
      if (encoder != null) {
        return encoder.newDataBlockEncodingContext(
            compressionAlgorithm, onDisk, dummyHeader);
      }
    }
    return new HFileBlockDefaultEncodingContext(compressionAlgorithm,
        null, dummyHeader);
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Corporation. Contact: coftware#gmail.com.