Package: org.apache.hadoop.io.compress

Usage examples of org.apache.hadoop.io.compress.Decompressor


        && isInputCompressionEmulationEnabled(conf)) {
      CompressionCodecFactory compressionCodecs =
        new CompressionCodecFactory(conf);
      CompressionCodec codec = compressionCodecs.getCodec(file);
      if (codec != null) {
        Decompressor decompressor = CodecPool.getDecompressor(codec);
        if (decompressor != null) {
          CompressionInputStream in =
            codec.createInputStream(fs.open(file), decompressor);
          //TODO Seek doesnt work with compressed input stream.
          //     Use SplittableCompressionCodec?
View Full Code Here


      {
      InputStream inputStream;

      inputStream = new FileInputStream( file );

      Decompressor decompressor = null;

      if( codec != null )
        {
        decompressor = getDecompressor();
        inputStream = codec.createInputStream( inputStream, decompressor );
        }

      final Decompressor finalDecompressor = decompressor;
      return new HadoopTupleInputStream( inputStream, tupleSerialization.getElementReader() )
      {
      @Override
      public void close() throws IOException
        {
View Full Code Here

      this.end = in.getPos() + length;
      init(tempReader);
    }
   
    private Decompressor getPooledOrNewDecompressor() {
      Decompressor decompressor = null;
      decompressor = decompressorPool.getCodec(codec.getDecompressorType());
      if (decompressor == null) {
        decompressor = codec.createDecompressor();
      }
      return decompressor;
View Full Code Here

     * @throws IOException
     */
    private ByteBuffer decompress(final long offset, final int compressedSize,
      final int decompressedSize, final boolean pread)
    throws IOException {
      Decompressor decompressor = null;
      ByteBuffer buf = null;
      try {
        decompressor = this.compressAlgo.getDecompressor();
        // My guess is that the bounded range fis is needed to stop the
        // decompressor reading into next block -- IIRC, it just grabs a
View Full Code Here

     * @throws IOException
     */
    protected void decompress(byte[] dest, int destOffset,
        InputStream bufferedBoundedStream,
        int uncompressedSize) throws IOException {
      Decompressor decompressor = null;
      try {
        decompressor = compressAlgo.getDecompressor();
        InputStream is = compressAlgo.createDecompressionStream(
            bufferedBoundedStream, decompressor, 0);

View Full Code Here

    if (codecNameUTF8Length == 0) {
      // no compression
      uncompressedData = new ByteArrayInputStream(storedData);
    } else {
      CompressionCodec codec = getCodecFromName(codecNameText, conf);
      Decompressor decompressor = null;
      if (decompressorCache != null) {
        // Create decompressor and add to cache if needed.
        decompressor = decompressorCache.get(codecNameText);
        if (decompressor == null) {
          decompressor = codec.createDecompressor();
        } else {
          decompressor.reset();
        }
      }
      if (decompressor == null) {
        uncompressedData = codec.createInputStream(new ByteArrayInputStream(storedData));
      } else {
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.compress.Decompressor

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.