Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.DecompressorStream


    DecompressorStream decompStream = null;

    // invalid bytes 0 and 1: the gzip magic must be 31, -117 (0x1f, 0x8b)
    try {
      int buffSize = 1 * 1024;
      byte buffer[] = new byte[buffSize];
      Decompressor decompressor = new BuiltInGzipDecompressor();
      DataInputBuffer gzbuf = new DataInputBuffer();
      decompStream = new DecompressorStream(gzbuf, decompressor);
      gzbuf.reset(new byte[] { 0, 0, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
      decompStream.read(buffer);
    } catch (IOException ioex) {
      // expected
    } catch (Exception ex) {
      fail("invalid 0 and 1 byte in gzip stream" + ex);
    }

    // invalid byte 2: the compression method (CM) must be 8 (deflate)
    try {
      int buffSize = 1 * 1024;
      byte buffer[] = new byte[buffSize];
      Decompressor decompressor = new BuiltInGzipDecompressor();
      DataInputBuffer gzbuf = new DataInputBuffer();
      decompStream = new DecompressorStream(gzbuf, decompressor);
      gzbuf.reset(new byte[] { 31, -117, 7, 1, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
      decompStream.read(buffer);
    } catch (IOException ioex) {
      // expected
    } catch (Exception ex) {
      fail("invalid 2 byte in gzip stream" + ex);
    }

    // invalid byte 3: flag value -32 (0xe0) sets reserved FLG bits that must be zero
    try {
      int buffSize = 1 * 1024;
      byte buffer[] = new byte[buffSize];
      Decompressor decompressor = new BuiltInGzipDecompressor();
      DataInputBuffer gzbuf = new DataInputBuffer();
      decompStream = new DecompressorStream(gzbuf, decompressor);
      gzbuf.reset(new byte[] { 31, -117, 8, -32, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
      decompStream.read(buffer);
    } catch (IOException ioex) {
      // expected
    } catch (Exception ex) {
      fail("invalid 3 byte in gzip stream" + ex);
    }
    // byte 3 = 4 sets FLG.FEXTRA, but the stream ends before a complete extra field
    try {
      int buffSize = 1 * 1024;
      byte buffer[] = new byte[buffSize];
      Decompressor decompressor = new BuiltInGzipDecompressor();
      DataInputBuffer gzbuf = new DataInputBuffer();
      decompStream = new DecompressorStream(gzbuf, decompressor);
      gzbuf.reset(new byte[] { 31, -117, 8, 4, 1, 1, 1, 11, 1, 1, 1, 1 }, 11);
      decompStream.read(buffer);
    } catch (IOException ioex) {
      // expected
    } catch (Exception ex) {
      fail("invalid byte 3 (FEXTRA) in gzip stream" + ex);
    }
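
For contrast with the header-corruption cases above, the following is a minimal happy-path sketch (not part of the excerpted test): it builds a well-formed gzip stream with java.util.zip.GZIPOutputStream and reads it back through a DecompressorStream backed by BuiltInGzipDecompressor. The payload and buffer size are illustrative assumptions.

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DecompressorStream;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;

public class GzipRoundTrip {
  public static void main(String[] args) throws Exception {
    // Illustrative payload; any byte array works.
    byte[] payload = "hello gzip".getBytes(StandardCharsets.UTF_8);

    // Produce a well-formed gzip stream (header starts with 31, -117, 8).
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (GZIPOutputStream gzos = new GZIPOutputStream(bos)) {
      gzos.write(payload);
    }
    byte[] gzipped = bos.toByteArray();

    // Feed the compressed bytes through the same classes the tests above exercise.
    Decompressor decompressor = new BuiltInGzipDecompressor();
    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.reset(gzipped, gzipped.length);
    try (DecompressorStream decompStream =
             new DecompressorStream(gzbuf, decompressor)) {
      byte[] buffer = new byte[1024];
      // A single read may return fewer bytes than the payload in general;
      // a loop would be needed for larger inputs.
      int n = decompStream.read(buffer);
      if (n > 0) {
        System.out.println(new String(buffer, 0, n, StandardCharsets.UTF_8));
      }
    }
  }
}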


      LOG.debug("Yielding stream to user with length " + streamLen);
      this.userInputStream = new FixedLengthInputStream(this.dataIn, streamLen);
      if (this.codec != null) {
        // The user needs to decompress the data; wrap the InputStream.
        decompressor.reset();
        this.userInputStream = new DecompressorStream(
            this.userInputStream, decompressor);
      }
      return this.userInputStream;
    }
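
The snippet above wraps the fixed-length stream in a DecompressorStream only when a codec is configured. As a point of comparison, here is a minimal, self-contained sketch of the usual Hadoop pattern for the same idea: pick a codec from the file name, borrow a Decompressor from CodecPool, and let the codec wrap the raw stream. The file path is an illustrative assumption; the rest uses standard Hadoop APIs.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.Decompressor;

public class CodecWrapExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Illustrative path; any *.gz file on the default filesystem works.
    Path path = new Path("/tmp/example.gz");

    // Pick the codec from the file extension (.gz -> GzipCodec).
    CompressionCodec codec = new CompressionCodecFactory(conf).getCodec(path);
    if (codec == null) {
      throw new IOException("No codec found for " + path);
    }
    FileSystem fs = path.getFileSystem(conf);

    Decompressor decompressor = CodecPool.getDecompressor(codec);
    InputStream in = null;
    try {
      // createInputStream returns a DecompressorStream (or a codec-specific
      // wrapper) that inflates the raw bytes as they are read.
      in = codec.createInputStream(fs.open(path), decompressor);
      IOUtils.copyBytes(in, System.out, conf, false);
    } finally {
      IOUtils.closeStream(in);
      CodecPool.returnDecompressor(decompressor);
    }
  }
}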

