Class org.apache.hadoop.hbase.io.hfile.HFile

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()
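
Before the individual excerpts, here is a minimal self-contained sketch of the call (assuming the 0.92-era createReader/CacheConfig API used in the excerpts below; the file path is hypothetical). loadFileInfo() reads the trailer and the file-info block, and the other reader calls shown on this page expect it to have run first.

    import java.io.IOException;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.util.Bytes;

    public class LoadFileInfoExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/example.hfile"); // hypothetical path
        HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
        try {
          // loadFileInfo() reads the trailer and the file-info block; the
          // reader calls used in the excerpts below (getScanner, getFirstKey,
          // getMetaBlock, ...) expect it to have been called first.
          Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
          for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) {
            System.out.println(Bytes.toString(e.getKey()) + " = "
                + Bytes.toStringBinary(e.getValue()));
          }
        } finally {
          reader.close();
        }
      }
    }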


    writer.close();
    fout.close();
    FSDataInputStream fin = fs.open(mFile);
    // Reuse the stream opened above rather than opening the file a second
    // time, which would leak the first stream.
    Reader reader = HFile.createReaderFromStream(mFile, fin,
        this.fs.getFileStatus(mFile).getLen(), cacheConf);
    reader.loadFileInfo();
    // No data -- this should return false.
    assertFalse(reader.getScanner(false, false).seekTo());
    someReadingWithMetaBlock(reader);
    fs.delete(mFile, true);
    reader.close();


          .create();
      writer.append("foo".getBytes(), "value".getBytes());
      writer.close();
      fout.close();
      Reader reader = HFile.createReader(fs, mFile, cacheConf);
      reader.loadFileInfo();
      assertNull(reader.getMetaBlock("non-existant", false));
    }
  }


    // Seek-benchmark setup: miss counts seeks that find no matching key,
    // totalBytes tracks how much data is read back.
    int miss = 0;
    long totalBytes = 0;
    FSDataInputStream fsdis = fs.open(path);
    Reader reader =
      new Reader(fsdis, fs.getFileStatus(path).getLen(), null, false);
    reader.loadFileInfo();
    KeySampler kSampler =
        new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
            keyLenGen);
    HFileScanner scanner = reader.getScanner(false, false);
    BytesWritable key = new BytesWritable();

  public void testEmptyHFile() throws IOException {
    Path f = new Path(ROOT_DIR, getName());
    Writer w = new Writer(this.fs, f);
    w.close();
    Reader r = new Reader(fs, f, null, false);
    r.loadFileInfo();
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
  }
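
Note that loadFileInfo() succeeds even when the file holds no data blocks; only getFirstKey() and getLastKey() come back null, as the assertions above show.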

  // write some records into the tfile
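Every excerpt on this page picks up after the write side has finished. A minimal sketch of that write side, reusing the path-based Writer constructor and the append(byte[], byte[]) overload seen in these examples (record contents are illustrative; HFile requires keys to be appended in ascending order):

    Writer writer = new Writer(fs, ncTFile);
    for (int i = 0; i < 100; i++) {
      // Zero-padded keys keep the append order ascending.
      writer.append(Bytes.toBytes(String.format("key%05d", i)),
          Bytes.toBytes("value" + i));
    }
    writer.close();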

    fout.close();
    FSDataInputStream fin = fs.open(ncTFile);
    // Pass the stream opened above instead of opening the file twice.
    Reader reader = new Reader(fin,
      fs.getFileStatus(ncTFile).getLen(), null, false);
    // Load up the index.
    reader.loadFileInfo();
    // Get a scanner that caches and that does not use pread.
    HFileScanner scanner = reader.getScanner(true, false);
    // Align scanner at start of the file.
    scanner.seekTo();
    readAllRecords(scanner);
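readAllRecords() is a helper from the surrounding test; a sketch of what such a loop can look like (getKey()/getValue() returning ByteBuffers is an assumption about this API era):

    // Walk every record from the scanner's current position (seekTo()
    // above has already positioned it at the first record).
    int count = 0;
    do {
      ByteBuffer key = scanner.getKey();
      ByteBuffer value = scanner.getValue();
      count++;
    } while (scanner.next());
    System.out.println("records read: " + count);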

    writer.close();
    fout.close();
    FSDataInputStream fin = fs.open(mFile);
    // Pass the stream opened above instead of opening the file twice.
    Reader reader = new Reader(fin, this.fs.getFileStatus(mFile)
        .getLen(), null, false);
    reader.loadFileInfo();
    // No data -- this should return false.
    assertFalse(reader.getScanner(false, false).seekTo());
    someReadingWithMetaBlock(reader);
    fs.delete(mFile, true);
    reader.close();

        Compression.Algorithm.NONE, null);
    writer.append("foo".getBytes(), "value".getBytes());
    writer.close();
    fout.close();
    Reader reader = new Reader(fs, mFile, null, false);
    reader.loadFileInfo();
    assertNull(reader.getMetaBlock("non-existant"));
  }
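
A reader-side sketch of fetching a meta block that does exist (the block name is hypothetical, and the ByteBuffer return type is an assumption about this API era):

    // Returns null when no block was written under that name, as the
    // assertNull above demonstrates.
    ByteBuffer meta = reader.getMetaBlock("mymeta"); // hypothetical name
    if (meta != null) {
      byte[] contents = new byte[meta.remaining()];
      meta.get(contents);
      // ... decode contents as the writer defined them
    }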

  /**
   * Make sure the originals for our compression libs don't change on us.

    Path path = new Path("/Users/ryan/rfile.big.txt");
    // Time how long it takes to open the reader and load the index
    // (printed below as "Index read time").
    long start = System.currentTimeMillis();
    SimpleBlockCache cache = new SimpleBlockCache();
    //LruBlockCache cache = new LruBlockCache();
    Reader reader = new HFile.Reader(lfs, path, cache, false);
    reader.loadFileInfo();
    System.out.println(reader.trailer);
    long end = System.currentTimeMillis();
   
    System.out.println("Index read time: " + (end - start));

        HColumnDescriptor hcd = htd.getFamily(Bytes.toBytes(familyStr));
        // verify that the bloom filter type on this file matches the
        // bloom filter configured for the column family
        Path dataFilePath = fs.listStatus(f.getPath())[0].getPath();
        Reader reader = HFile.createReader(fs, dataFilePath, new CacheConfig(conf), conf);
        Map<byte[], byte[]> fileInfo = reader.loadFileInfo();

        byte[] bloomFilter = fileInfo.get(StoreFile.BLOOM_FILTER_TYPE_KEY);
        if (bloomFilter == null) bloomFilter = Bytes.toBytes("NONE");
        assertEquals("Incorrect bloom filter used for column family " + familyStr +
          "(reader: " + reader + ")",

