Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.BlockCompressorStream


    LzoCompressor.CompressionStrategy strategy = getCompressionStrategy(conf);
    int bufferSize = getBufferSize(conf);
    int compressionOverhead = strategy.name().contains("LZO1") ?
        (bufferSize >> 4) + 64 + 3 : (bufferSize >> 3) + 128 + 3;

    return new BlockCompressorStream(out, compressor, bufferSize,
        compressionOverhead);
  }
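
This excerpt sizes the worst-case per-block overhead for an LZO compressor before wrapping the sink in a BlockCompressorStream, which frames its output as length-prefixed blocks. Below is a minimal sketch of the same pattern with the strategy check reduced to a boolean flag; the helper class and method names are illustrative, not part of hadoop-lzo:

    import java.io.OutputStream;
    import org.apache.hadoop.io.compress.BlockCompressorStream;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.Compressor;

    // Illustrative helper (not part of hadoop-lzo): wraps a sink the same way the
    // excerpt above does, deriving the per-block overhead from the buffer size.
    final class LzoBlockStreams {
      private LzoBlockStreams() {}

      static CompressionOutputStream wrap(OutputStream out, Compressor compressor,
                                          int bufferSize, boolean lzo1Family) {
        // Worst-case expansion bounds taken from the excerpt: LZO1-family strategies
        // get bufferSize/16 + 64 + 3 extra bytes, the rest bufferSize/8 + 128 + 3.
        int compressionOverhead = lzo1Family
            ? (bufferSize >> 4) + 64 + 3
            : (bufferSize >> 3) + 128 + 3;
        return new BlockCompressorStream(out, compressor, bufferSize, compressionOverhead);
      }
    }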


      di.readFully(data);
      LOG.info("Original data is " + data.length + " bytes.");

      ByteArrayInputStream bi = new ByteArrayInputStream(data);
      ByteArrayOutputStream bo = new ByteArrayOutputStream(data.length);
      BlockCompressorStream co = new BlockCompressorStream(bo, new BuiltInZlibDeflater());
      LOG.info("Starting.");
      long start = System.currentTimeMillis();
      IOUtils.copy(bi, co);
      co.close();
      long end = System.currentTimeMillis();
      LOG.info("Compression took " + ((end - start) / 1000d) + " ms");
      LOG.info("Compressed data is " + bo.size() + " bytes.");

      if (true)
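
The excerpt above is cut off after the size report; it times block compression with the pure-Java zlib deflater. Here is a self-contained sketch of the same benchmark, assuming commons-io's IOUtils and synthetic input data:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Random;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.io.compress.BlockCompressorStream;
    import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;

    public class ZlibBlockCompressDemo {
      public static void main(String[] args) throws IOException {
        byte[] data = new byte[1 << 20];                 // 1 MB of synthetic input
        new Random(0).nextBytes(data);

        ByteArrayInputStream bi = new ByteArrayInputStream(data);
        ByteArrayOutputStream bo = new ByteArrayOutputStream(data.length);
        // two-argument constructor: default block size and overhead, as in the excerpt
        BlockCompressorStream co = new BlockCompressorStream(bo, new BuiltInZlibDeflater());

        long start = System.nanoTime();
        IOUtils.copy(bi, co);
        co.close();                                      // flushes the final block
        double elapsedMs = (System.nanoTime() - start) / 1_000_000d;

        System.out.println("Compression took " + elapsedMs + " ms");
        System.out.println("Original " + data.length + " bytes, compressed " + bo.size() + " bytes");
      }
    }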

        int bufferSize = getBufferSize(conf);
        int compressionOverhead = strategy.name().contains("LZO1")
                ? (bufferSize >> 4) + 64 + 3
                : (bufferSize >> 3) + 128 + 3;

        return new BlockCompressorStream(out, compressor, bufferSize,
                compressionOverhead);
    }
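
In application code these streams are usually obtained through a CompressionCodec rather than constructed directly; the codec performs the buffer and overhead sizing shown above. A sketch using stock Hadoop classes follows (Snappy is chosen only as an example of a block codec and requires Snappy support in the Hadoop build):

    import java.io.IOException;
    import java.io.OutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.Compressor;
    import org.apache.hadoop.io.compress.SnappyCodec;

    final class CodecBasedStreams {
      private CodecBasedStreams() {}

      // Obtain the block stream through the codec instead of new-ing it directly.
      static CompressionOutputStream openCompressed(OutputStream raw, Configuration conf)
          throws IOException {
        SnappyCodec codec = new SnappyCodec();
        codec.setConf(conf);                  // buffer size is read from the configuration
        Compressor compressor = CodecPool.getCompressor(codec);
        // For block codecs such as Snappy this returns a BlockCompressorStream whose
        // bufferSize and compressionOverhead the codec computes itself (cf. the
        // (bufferSize / 6) + 32 overhead visible in the test excerpts below).
        // Remember to CodecPool.returnCompressor(compressor) once the stream is closed.
        return codec.createOutputStream(raw, compressor);
      }
    }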

            LOG.info("Original data is " + data.length + " bytes.");

            for (int i = 0; i < 1; i++) {
                ByteArrayInputStream bi = new ByteArrayInputStream(data);
                ByteArrayOutputStream bo = new ByteArrayOutputStream(data.length);
                BlockCompressorStream co = new BlockCompressorStream(bo, new LzoCompressor(), 64 * 1024, 18);
                LOG.info("Starting.");
                long start = System.currentTimeMillis();
                IOUtils.copy(bi, co);
                co.close();
                long end = System.currentTimeMillis();
                LOG.info("Compression took " + ((end - start) / 1000d) + " ms");
                LOG.info("Compressed data is " + bo.size() + " bytes.");

                byte[] cb = bo.toByteArray();
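
The excerpt stops right after capturing the compressed bytes in cb. A generic way to verify the round trip is sketched below, using only stock Hadoop classes; the Decompressor instance has to match the LzoCompressor used above (e.g. the LZO decompressor from the same library):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.io.compress.BlockDecompressorStream;
    import org.apache.hadoop.io.compress.Decompressor;

    final class BlockRoundTrip {
      private BlockRoundTrip() {}

      // Decompress bytes that were produced by a BlockCompressorStream, using a
      // Decompressor that matches the compressor which wrote them.
      static byte[] decompress(byte[] compressed, Decompressor decompressor,
                               int bufferSize) throws IOException {
        BlockDecompressorStream in = new BlockDecompressorStream(
            new ByteArrayInputStream(compressed), decompressor, bufferSize);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        IOUtils.copyBytes(in, out, 4096, true);   // true: close the streams when done
        return out.toByteArray();
      }
    }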

    byte[] buf = null;
    BlockDecompressorStream blockDecompressorStream = null;
    try {
      // compress empty stream
      bytesOut = new ByteArrayOutputStream();
      BlockCompressorStream blockCompressorStream = new BlockCompressorStream(
          bytesOut, new SnappyCompressor(), 1024, 0);
      // close without write
      blockCompressorStream.close();

      // check compressed output
      buf = bytesOut.toByteArray();
      assertEquals("empty stream compressed output size != 4", 4, buf.length);

    int compressionOverhead = (bufferSize / 6) + 32;
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    try {
      DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
      CompressionOutputStream deflateFilter = new BlockCompressorStream(
          compressedDataBuffer, new SnappyCompressor(bufferSize), bufferSize,
          compressionOverhead);
      deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));

      deflateOut.write(bytes, 0, bytes.length);
      deflateOut.flush();
      deflateFilter.finish();

      DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
      deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
          compressedDataBuffer.getLength());
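
The excerpt is truncated after loading the compressed bytes into deCompressedDataBuffer; the read-back half presumably mirrors the write path with a BlockDecompressorStream and a SnappyDecompressor. A hedged continuation, reusing the excerpt's variables (bytes, bufferSize, inflateIn):

      CompressionInputStream inflateFilter = new BlockDecompressorStream(
          deCompressedDataBuffer, new SnappyDecompressor(bufferSize), bufferSize);
      inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));

      byte[] result = new byte[bytes.length];
      inflateIn.readFully(result);                      // read every decompressed byte back
      assertArrayEquals("original and round-tripped bytes differ", bytes, result);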

    byte[] buf = null;
    BlockDecompressorStream blockDecompressorStream = null;
    try {
      // compress empty stream
      bytesOut = new ByteArrayOutputStream();
      BlockCompressorStream blockCompressorStream = new BlockCompressorStream(
          bytesOut, new Lz4Compressor(), 1024, 0);
      // close without write
      blockCompressorStream.close();
      // check compressed output
      buf = bytesOut.toByteArray();
      assertEquals("empty stream compressed output size != 4", 4, buf.length);
      // use compressed output as input for decompression
      bytesIn = new ByteArrayInputStream(buf);
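
A plausible continuation of this empty-stream check on the decompression side: wrap the 4-byte output and expect an immediate end-of-stream (the Lz4Decompressor no-argument constructor is assumed here):

      // create a decompression stream over the 4-byte output
      blockDecompressorStream = new BlockDecompressorStream(
          bytesIn, new Lz4Decompressor(), 1024);
      // nothing was written, so the zero-length block header must yield EOF immediately
      assertEquals("read() on empty compressed stream != -1",
          -1, blockDecompressorStream.read());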

    byte[] bytes = generate(BYTE_SIZE);
    int bufferSize = 262144;
    int compressionOverhead = (bufferSize / 6) + 32;
    try {
      DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
      CompressionOutputStream deflateFilter = new BlockCompressorStream(
          compressedDataBuffer, new Lz4Compressor(bufferSize), bufferSize,
          compressionOverhead);
      deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
      deflateOut.write(bytes, 0, bytes.length);
      deflateOut.flush();
      deflateFilter.finish();

      DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
      deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
          compressedDataBuffer.getLength());
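
The same round trip can be driven through Lz4Codec, which constructs the BlockCompressorStream internally using its configured buffer size and the (bufferSize / 6) + 32 overhead seen above. A minimal end-to-end sketch (requires LZ4 support in the Hadoop build; error handling omitted):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Arrays;
    import java.util.Random;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.IOUtils;
    import org.apache.hadoop.io.compress.CompressionInputStream;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.Lz4Codec;

    public class Lz4CodecRoundTrip {
      public static void main(String[] args) throws IOException {
        byte[] bytes = new byte[256 * 1024];
        new Random(12345).nextBytes(bytes);

        Lz4Codec codec = new Lz4Codec();
        codec.setConf(new Configuration());

        // compress through the codec; it wraps the sink in a BlockCompressorStream
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        CompressionOutputStream out = codec.createOutputStream(compressed);
        out.write(bytes);
        out.close();

        // decompress through the matching BlockDecompressorStream
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        CompressionInputStream in =
            codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
        IOUtils.copyBytes(in, restored, 4096, true);

        System.out.println("round trip ok: " + Arrays.equals(bytes, restored.toByteArray()));
      }
    }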


