Package org.apache.hadoop.hbase.io.hfile

Examples of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder
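
HFileContextBuilder assembles an HFileContext: the bundle of settings (block size, compression algorithm, checksum type and granularity, MVCC read points, cell tags, data block encoding) that HFile writers, readers, and block encoders consult. The fragments below are drawn from the HBase source and test code; several are cut off mid-statement where the original listing ends.

A minimal sketch of typical usage (the values are illustrative, not defaults):

    HFileContext context = new HFileContextBuilder()
        .withBlockSize(64 * 1024)                    // target on-disk block size
        .withCompression(Compression.Algorithm.NONE) // no block compression
        .withIncludesMvcc(true)                      // retain MVCC read points
        .withIncludesTags(false)                     // no per-cell tags
        .build();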


    // Decode an encoded block of key/values back into a ByteBuffer. The
    // HFileContext handed to the decoding context must match the settings
    // the data was encoded with.
    ByteArrayInputStream bais = new ByteArrayInputStream(encodedData, ENCODED_DATA_OFFSET,
        encodedData.length - ENCODED_DATA_OFFSET);
    DataInputStream dis = new DataInputStream(bais);
    ByteBuffer actualDataset;
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS).withIncludesTags(includesTags)
        .withCompression(Compression.Algorithm.NONE).build();
    actualDataset = encoder.decodeKeyValues(dis, encoder.newDataBlockDecodingContext(meta));
    actualDataset.rewind();


      // Skip encodings that have no encoder implementation, and PREFIX_TREE.
      if (encoding.getEncoder() == null || encoding == DataBlockEncoding.PREFIX_TREE) {
        continue;
      }

      // Build an encoding context that mirrors the decode case above: HBase
      // checksums off, no MVCC read points, no tags, no compression.
      DataBlockEncoder encoder = encoding.getEncoder();
      HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
          .withIncludesMvcc(false).withIncludesTags(false)
          .withCompression(Compression.Algorithm.NONE).build();
      HFileBlockEncodingContext encodingContext = encoder.newDataBlockEncodingContext(encoding,
          HConstants.HFILEBLOCK_DUMMY_HEADER, meta);
      ByteBuffer encodedBuffer = TestDataBlockEncoders.encodeKeyValues(encoding, kvs,

  // Build the write-time HFileContext for a store file, falling back to the
  // HFile default when no compression is supplied.
  private HFileContext createFileContext(Compression.Algorithm compression,
      boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
    if (compression == null) {
      compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
    }
    HFileContext hFileContext = new HFileContextBuilder()
                                .withIncludesMvcc(includeMVCCReadpoint)
                                .withIncludesTags(includesTag)
                                .withCompression(compression)
                                .withCompressTags(family.shouldCompressTags())
                                .withChecksumType(checksumType)

      Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();

      // Carry the column family's storage settings over to the new file's
      // context; checksum settings come from the configuration.
      int blocksize = familyDescriptor.getBlocksize();
      Algorithm compression = familyDescriptor.getCompression();
      BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
      HFileContext hFileContext = new HFileContextBuilder()
                                  .withCompression(compression)
                                  .withChecksumType(HStore.getChecksumType(conf))
                                  .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
                                  .withBlockSize(blocksize)
                                  .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())

    // Write a new store file into the directory of an existing one, using a
    // small block size for the test.
    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c), fs)
        .withOutputDir(storedir)
        .withFileContext(meta)
        .build();
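
The fragment stops after build(); a plausible continuation, hedged against the 0.98-era StoreFile.Writer API (the cell contents here are made up):

    // Append one cell, record the max sequence id read above as file
    // metadata, and close the writer.
    w.append(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"),
        Bytes.toBytes("qualifier"), Bytes.toBytes("value")));
    w.appendMetadata(seqid, false); // false: not produced by a major compaction
    w.close();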

    Path p = new Path(root_dir, "test");

    // Create a bare HFile writer with a 1 KB block size.
    Configuration conf = TEST_UTIL.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
    HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, p)
        .withFileContext(meta)
        .create();
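
A short continuation sketch, assuming the 0.98-era HFile.Writer that appends KeyValues directly (the row, family, qualifier, and value bytes are placeholders):

    w.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v1")));
    w.close();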


  @Test
  public void testReseek() throws Exception {
    // Write a store file with tags enabled, tag compression on, and PREFIX
    // data block encoding.
    Path f = new Path(ROOT_DIR, "testReseek");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
        .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
    StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs).withFilePath(f)
        .withFileContext(meta).build();

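
Because the context sets withIncludesTags(true) and withCompressTags(true), this store file can carry per-cell tags. A hedged sketch of appending one tagged cell, assuming the 0.98-era Tag API (tag type and payload are illustrative):

    KeyValue tagged = new KeyValue(Bytes.toBytes("r1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("v"),
        new Tag[] { new Tag((byte) 1, "tag-payload") });
    writer.append(tagged);
    writer.close();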

        "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem)util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFile.Writer writer = new StoreFile.WriterBuilder(
        util.getConfiguration(), cacheConf, hfs)
            .withOutputDir(hfilePath)
            .withFileContext(meta)
            .build();

        "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem)util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    HFileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFile.Writer writer = new StoreFile.WriterBuilder(
        util.getConfiguration(), cacheConf, hfs)
            .withOutputDir(hfilePath)
            .withFileContext(meta)
            .build();
View Full Code Here
