Examples of EncodedSeeker
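
All of the snippets on this page come from HBase's prefix-tree encoding tests and share a common fixture that the excerpts do not show. Below is a minimal sketch of the fields and helpers they assume; the names follow the snippets, but the constant values and helper bodies are assumptions, not the actual test code.

    import java.nio.ByteBuffer;
    import java.util.concurrent.ConcurrentSkipListSet;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
    import org.apache.hadoop.hbase.util.Bytes;

    // Shared fixture assumed by every snippet on this page.
    private static final byte[] CF_BYTES = Bytes.toBytes("EncodingTestCF"); // assumed value
    private static final int NUM_ROWS_PER_BATCH = 50;  // assumed value
    private static final int NUM_COLS_PER_ROW = 20;    // assumed value

    private final PrefixTreeCodec encoder = new PrefixTreeCodec();
    private final ConcurrentSkipListSet<KeyValue> kvset =
        new ConcurrentSkipListSet<KeyValue>(KeyValue.COMPARATOR);
    private int numBatchesWritten = 0;

    // Row keys must sort in row-index order within a batch (the seek
    // tests rely on this), so the index is zero-padded. The body is a
    // plausible sketch, not the real helper.
    private static byte[] getRowKey(int batchId, int i) {
      return Bytes.toBytes("batch" + batchId + "_row" + String.format("%05d", i));
    }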


Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateFixedTestData(kvset, batchId);
    // Encode the batch with the PREFIX_TREE codec, no compression.
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
    // Wrap the encoded block for reading, skipping the 2-byte encoding
    // id (DataBlockEncoding.ID_SIZE) that prefixes the on-disk bytes.
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateFixedTestData(kvset, batchId, false);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);

    // Seek before the first keyvalue.
    KeyValue seekKey = KeyValue.createFirstDeleteFamilyOnRow(
        getRowKey(batchId, 0), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNull(seeker.getKeyValue());

    // Seek before the middle keyvalue.
    seekKey = KeyValue.createFirstDeleteFamilyOnRow(
        getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
        seeker.getKeyValue().getRow());

    // Seek before the last keyvalue; "zzzz" sorts after every row key.
    seekKey = KeyValue.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"),
        CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
        seeker.getKeyValue().getRow());
  }
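
Note that every seekToKeyInBlock call above passes seekBefore = true, so the seeker stops on the last cell strictly before the seek key: seeking before row 0 leaves the seeker positioned on nothing, and seeking before row NUM_ROWS_PER_BATCH / 3 lands on row NUM_ROWS_PER_BATCH / 3 - 1. The generateFixedTestData helper the tests call is not shown on this page; the sketch below is one plausible shape for it (the qualifier and value layouts are assumptions). It serializes each KeyValue back-to-back, which is the uncompressed format encodeKeyValues expects when its memstore-timestamp flag is false; the three-argument overload used by some snippets presumably just toggles an extra option.

    // A sketch, not the actual helper: fill kvset with a full grid of
    // rows x columns for this batch, then concatenate the KeyValues'
    // backing bytes (<keylen:int><vallen:int><key><value> per cell).
    private ByteBuffer generateFixedTestData(
        ConcurrentSkipListSet<KeyValue> kvset, int batchId) {
      for (int i = 0; i < NUM_ROWS_PER_BATCH; i++) {
        for (int j = 0; j < NUM_COLS_PER_ROW; j++) {
          kvset.add(new KeyValue(getRowKey(batchId, i), CF_BYTES,
              Bytes.toBytes("col" + j), Bytes.toBytes("v" + i + "_" + j)));
        }
      }
      int totalSize = 0;
      for (KeyValue kv : kvset) {
        totalSize += kv.getLength();
      }
      ByteBuffer result = ByteBuffer.allocate(totalSize);
      for (KeyValue kv : kvset) {
        result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
      }
      result.rewind();
      return result;
    }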

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    PrefixTreeCodec encoder = new PrefixTreeCodec();
    ByteBuffer dataBuffer = generateRandomTestData(kvset, numBatchesWritten++);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);
    // Scan the whole block and verify cells decode in comparator order.
    KeyValue previousKV = null;
    do {
      KeyValue currentKV = seeker.getKeyValue();
      if (previousKV != null && KeyValue.COMPARATOR.compare(currentKV, previousKV) < 0) {
        dumpInputKVSet();
        fail("Current kv " + currentKV + " is smaller than previous keyvalue "
            + previousKV);
      }
      previousKV = currentKV;
    } while (seeker.next());
  }
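
The forward scan above re-reads every cell through the seeker and fails if decoding ever produces out-of-order results. dumpInputKVSet is only invoked on that failure path and is not shown here; a plausible sketch:

    // Hypothetical debugging helper: print the input KeyValues so an
    // ordering failure in the decoded scan can be diagnosed.
    private void dumpInputKVSet() {
      System.out.println("Input keyvalue set in error case:");
      for (KeyValue kv : kvset) {
        System.out.println(kv);
      }
    }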

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateRandomTestData(kvset, batchId);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    verifySeeking(seeker, readBuffer, batchId);
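
verifySeeking is the shared assertion helper for this snippet and the next one, which differs only in building its seeker with KeyValue.KEY_COMPARATOR (comparing serialized keys rather than whole KeyValues). The helper is not shown on this page; the sketch below is a minimal stand-in that assumes every row of the batch was written, which may not hold for randomly generated data.

    // A sketch, not the actual helper: seek (seekBefore = false) to the
    // first possible key on each row and check where the seeker lands.
    private void verifySeeking(EncodedSeeker seeker, ByteBuffer encoded,
        int batchId) {
      for (int i = 0; i < NUM_ROWS_PER_BATCH; i++) {
        seeker.setCurrentBuffer(encoded);
        KeyValue firstOnRow = KeyValue.createFirstOnRow(getRowKey(batchId, i));
        seeker.seekToKeyInBlock(firstOnRow.getBuffer(),
            firstOnRow.getKeyOffset(), firstOnRow.getKeyLength(), false);
        assertArrayEquals(getRowKey(batchId, i),
            seeker.getKeyValue().getRow());
      }
    }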

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateRandomTestData(kvset, batchId);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    verifySeeking(seeker, readBuffer, batchId);

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateFixedTestData(kvset, batchId);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    int batchId = numBatchesWritten++;
    ByteBuffer dataBuffer = generateFixedTestData(kvset, batchId, false);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);

    // Seek before the first keyvalue.
    KeyValue seekKey = KeyValue.createFirstDeleteFamilyOnRow(
        getRowKey(batchId, 0), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNull(seeker.getKeyValue());

    // Seek before the middle keyvalue.
    seekKey = KeyValue.createFirstDeleteFamilyOnRow(
        getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
        seeker.getKeyValue().getRow());

    // Seek before the last keyvalue; "zzzz" sorts after every row key.
    seekKey = KeyValue.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"),
        CF_BYTES);
    seeker.seekToKeyInBlock(seekKey.getBuffer(), seekKey.getKeyOffset(),
        seekKey.getKeyLength(), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
        seeker.getKeyValue().getRow());
  }

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    PrefixTreeCodec encoder = new PrefixTreeCodec();
    ByteBuffer dataBuffer = generateRandomTestData(kvset, numBatchesWritten++);
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        Algorithm.NONE, DataBlockEncoding.PREFIX_TREE, new byte[0]);
    encoder.encodeKeyValues(dataBuffer, false, blkEncodingCtx);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.KEY_COMPARATOR, false);
    byte[] onDiskBytes = blkEncodingCtx.getOnDiskBytesWithHeader();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes,
        DataBlockEncoding.ID_SIZE, onDiskBytes.length
            - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);
    KeyValue previousKV = null;
    do {
      KeyValue currentKV = seeker.getKeyValue();
      if (previousKV != null && KeyValue.COMPARATOR.compare(currentKV, previousKV) < 0) {
        dumpInputKVSet();
        fail("Current kv " + currentKV + " is smaller than previous keyvalue "
            + previousKV);
      }
      previousKV = currentKV;
    } while (seeker.next());
  }

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker
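
The last two snippets target a later HBase API: createSeeker now takes an HFileBlockDecodingContext instead of a boolean, seekToKeyInBlock takes a Cell (here a KeyValue.KeyOnlyKeyValue), and the encoding context is configured through an HFileContext named meta that the excerpts assume but do not show. A minimal sketch of its construction follows; only the tags flag is implied by the snippets' assertions, and any other builder options are assumptions.

    HFileContext meta = new HFileContextBuilder()
        .withIncludesTags(includesTag)
        .withCompression(Algorithm.NONE)
        .build();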

    int batchId = numBatchesWritten++;
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    generateFixedTestData(kvset, batchId, false, includesTag, encoder, blkEncodingCtx,
        userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
        encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);

    // Seek before the first keyvalue.
    KeyValue seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(getRowKey(batchId, 0), CF_BYTES);
    seeker.seekToKeyInBlock(new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(),
        seekKey.getKeyOffset(), seekKey.getKeyLength()), true);
    assertNull(seeker.getKeyValue());

    // Seek before the middle keyvalue.
    seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(
        getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
    seeker.seekToKeyInBlock(new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(),
        seekKey.getKeyOffset(), seekKey.getKeyLength()), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
        seeker.getKeyValue().getRow());

    // Seek before the last keyvalue; "zzzz" sorts after every row key.
    seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
    seeker.seekToKeyInBlock(new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(),
        seekKey.getKeyOffset(), seekKey.getKeyLength()), true);
    assertNotNull(seeker.getKeyValue());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
        seeker.getKeyValue().getRow());
  }

Examples of org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker

    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    // meta must at least carry the tags flag that the assertions below
    // depend on; other builder options are omitted here.
    HFileContext meta = new HFileContextBuilder()
        .withIncludesTags(includesTag)
        .build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    generateRandomTestData(kvset, numBatchesWritten++, includesTag, encoder, blkEncodingCtx,
        userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(KeyValue.COMPARATOR,
        encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(readBuffer);
    Cell previousKV = null;
    do {
      Cell currentKV = seeker.getKeyValue();
      System.out.println(currentKV);
      if (previousKV != null && KeyValue.COMPARATOR.compare(currentKV, previousKV) < 0) {
        dumpInputKVSet();
        fail("Current kv " + currentKV + " is smaller than previous keyvalue " + previousKV);
      }
      // Tag presence on the decoded cell must match how the block was encoded.
      if (!includesTag) {
        assertFalse(currentKV.getTagsLength() > 0);
      } else {
        assertTrue(currentKV.getTagsLength() > 0);
      }
      previousKV = currentKV;
    } while (seeker.next());
  }