Examples of loadFileInfo()


Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

            // verify that the compression on this file matches the configured
            // compression
            Path dataFilePath = fileSystem.listStatus(f.getPath())[0].getPath();
            Reader reader = HFile.createReader(fileSystem, dataFilePath,
                new CacheConfig(conf));
            reader.loadFileInfo();
            assertEquals("Incorrect compression used for column family " + familyStr
                         + "(reader: " + reader + ")",
                         configuredCompression.get(familyStr), reader.getCompressionAlgorithm());
            break;
          }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    Path f = new Path(ROOT_DIR, getName());
    Writer w =
        HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).create();
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf);
    r.loadFileInfo();
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
  }

  /**
 
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    FSDataInputStream fin = fs.open(ncTFile);
    Reader reader = HFile.createReaderFromStream(ncTFile, fs.open(ncTFile),
      fs.getFileStatus(ncTFile).getLen(), cacheConf);
    System.out.println(cacheConf.toString());
    // Load up the index.
    reader.loadFileInfo();
    // Get a scanner that caches and that does not use pread.
    HFileScanner scanner = reader.getScanner(true, false);
    // Align scanner at start of the file.
    scanner.seekTo();
    readAllRecords(scanner);
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    writer.close();
    fout.close();
    FSDataInputStream fin = fs.open(mFile);
    Reader reader = HFile.createReaderFromStream(mFile, fs.open(mFile),
        this.fs.getFileStatus(mFile).getLen(), cacheConf);
    reader.loadFileInfo();
    // No data -- this should return false.
    assertFalse(reader.getScanner(false, false).seekTo());
    someReadingWithMetaBlock(reader);
    fs.delete(mFile, true);
    reader.close();
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

          .create();
      writer.append("foo".getBytes(), "value".getBytes());
      writer.close();
      fout.close();
      Reader reader = HFile.createReader(fs, mFile, cacheConf);
      reader.loadFileInfo();
      assertNull(reader.getMetaBlock("non-existant", false));
    }
  }

  /**
 
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    int miss = 0;
    long totalBytes = 0;
    FSDataInputStream fsdis = fs.open(path);
    Reader reader = HFile.createReaderFromStream(path, fsdis,
        fs.getFileStatus(path).getLen(), new CacheConfig(conf));
    reader.loadFileInfo();
    KeySampler kSampler =
        new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
            keyLenGen);
    HFileScanner scanner = reader.getScanner(false, USE_PREAD);
    BytesWritable key = new BytesWritable();
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

            // verify that the compression on this file matches the configured
            // compression
            Path dataFilePath = fileSystem.listStatus(f.getPath())[0].getPath();
            Reader reader = HFile.createReader(fileSystem, dataFilePath,
                new CacheConfig(conf));
            reader.loadFileInfo();
            assertEquals("Incorrect compression used for column family " + familyStr
                         + "(reader: " + reader + ")",
                         configuredCompression.get(familyStr), reader.getCompressionAlgorithm());
            break;
          }
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    SimpleBlockCache cache = new SimpleBlockCache();
    CacheConfig cacheConf = new CacheConfig(cache, true, false, false, false,
        false, false, false);

    Reader reader = HFile.createReader(lfs, path, cacheConf);
    reader.loadFileInfo();
    System.out.println(reader.getTrailer());
    long end = System.currentTimeMillis();

    System.out.println("Index read time: " + (end - start));
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    int miss = 0;
    long totalBytes = 0;
    FSDataInputStream fsdis = fs.open(path);
    Reader reader = HFile.createReader(path, fsdis,
        fs.getFileStatus(path).getLen(), new CacheConfig(conf));
    reader.loadFileInfo();
    KeySampler kSampler =
        new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(),
            keyLenGen);
    HFileScanner scanner = reader.getScanner(false, USE_PREAD);
    BytesWritable key = new BytesWritable();
View Full Code Here

Examples of org.apache.hadoop.hbase.io.hfile.HFile.Reader.loadFileInfo()

    if (cacheConf == null) cacheConf = new CacheConfig(conf);
    Path f = new Path(ROOT_DIR, getName());
    Writer w = HFile.getWriterFactory(conf, cacheConf).createWriter(this.fs, f);
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf);
    r.loadFileInfo();
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
  }

  // write some records into the tfile
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.