Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.DefaultCodec
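DefaultCodec is Hadoop's built-in CompressionCodec backed by zlib/DEFLATE; it is the codec used when none is configured explicitly. The snippets below show it being passed to SequenceFile writers and tests. As orientation, here is a minimal, self-contained sketch (not taken from the snippets below) that compresses and decompresses an in-memory buffer with DefaultCodec:

  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.compress.CompressionInputStream;
  import org.apache.hadoop.io.compress.CompressionOutputStream;
  import org.apache.hadoop.io.compress.DefaultCodec;

  public class DefaultCodecRoundTrip {
    public static void main(String[] args) throws Exception {
      DefaultCodec codec = new DefaultCodec();
      codec.setConf(new Configuration());   // DefaultCodec is Configurable and needs a conf

      byte[] original = "hello, DefaultCodec".getBytes("UTF-8");

      // Compress into an in-memory buffer.
      ByteArrayOutputStream compressed = new ByteArrayOutputStream();
      CompressionOutputStream out = codec.createOutputStream(compressed);
      out.write(original);
      out.close();

      // Decompress and print the round-tripped bytes.
      CompressionInputStream in =
          codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
      byte[] buf = new byte[original.length];
      int n = in.read(buf);
      in.close();
      System.out.println(new String(buf, 0, n, "UTF-8"));
    }
  }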


  public TestSequenceFile(String name) { super(name); }

  /** Unit tests for SequenceFile. */
  public void testZlibSequenceFile() throws Exception {
    LOG.info("Testing SequenceFile with DefaultCodec");
    compressedSeqFileTest(new DefaultCodec());
    LOG.info("Successfully tested SequenceFile with DefaultCodec");
  }
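The helper compressedSeqFileTest drives write/read cycles with the supplied codec. A standalone sketch of the same round trip, using the classic createWriter/Reader overloads on the local filesystem (the path and key/value classes here are illustrative, not from the test):

  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.getLocal(conf);
  Path path = new Path(System.getProperty("test.build.data", "."), "default-codec.seq");

  // Write record-compressed key/value pairs with DefaultCodec.
  SequenceFile.Writer writer = SequenceFile.createWriter(
      fs, conf, path, IntWritable.class, Text.class,
      SequenceFile.CompressionType.RECORD, new DefaultCodec());
  try {
    for (int i = 0; i < 100; i++) {
      writer.append(new IntWritable(i), new Text("value-" + i));
    }
  } finally {
    writer.close();
  }

  // Read them back; decompression is transparent to the caller.
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
  try {
    IntWritable key = new IntWritable();
    Text value = new Text();
    while (reader.next(key, value)) {
      // use key/value
    }
  } finally {
    reader.close();
  }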


  public void testSequenceFileMetadata() throws Exception {
    LOG.info("Testing SequenceFile with metadata");
    int count = 1024 * 10;
    int megabytes = 1;
    int factor = 5;
    CompressionCodec codec = new DefaultCodec();
    Path file = new Path(System.getProperty("test.build.data",".")+"/test.seq.metadata");
    Path recordCompressedFile =
      new Path(System.getProperty("test.build.data",".")+"/test.rc.seq.metadata");
    Path blockCompressedFile =
      new Path(System.getProperty("test.build.data",".")+"/test.bc.seq.metadata");
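The full test writes those three files and verifies the stored metadata. A short sketch of attaching and reading SequenceFile.Metadata with the codec above (assuming fs and conf are available as in the full test; the metadata key/value are illustrative):

  SequenceFile.Metadata metadata = new SequenceFile.Metadata();
  metadata.set(new Text("created-by"), new Text("testSequenceFileMetadata"));

  // Writer overload that accepts a Metadata instance (progress may be null).
  SequenceFile.Writer writer = SequenceFile.createWriter(
      fs, conf, file, IntWritable.class, Text.class,
      SequenceFile.CompressionType.BLOCK, codec, null, metadata);
  writer.append(new IntWritable(1), new Text("one"));
  writer.close();

  // Metadata is stored in the file header and is available from the reader.
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
  Text createdBy = reader.getMetadata().get(new Text("created-by"));
  reader.close();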

      (short) conf.getInt("hbase.regionserver.hlog.replication",
        fs.getDefaultReplication()),
      conf.getLong("hbase.regionserver.hlog.blocksize",
        fs.getDefaultBlockSize()),
      SequenceFile.CompressionType.NONE,
      new DefaultCodec(),
      null,
      new Metadata());

    // Get at the private FSDataOutputStream inside SequenceFile so we can
    // call sync on it.  Make it accessible.  Stash it aside for call up in
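The reflection the comment describes looks roughly like the sketch below. The field name "out" and the class that declares it vary across SequenceFile versions, so treat both as assumptions rather than a stable API:

  // Hypothetical sketch: pull the private FSDataOutputStream out of the writer
  // so sync() (hflush() in newer Hadoop) can be called on it directly.
  java.lang.reflect.Field outField = SequenceFile.Writer.class.getDeclaredField("out");
  outField.setAccessible(true);
  FSDataOutputStream hdfsOut = (FSDataOutputStream) outField.get(writer);
  hdfsOut.sync();   // force the buffered data down to the underlying filesystem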

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, null, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name, keyClass, valClass,
                                        new DefaultCodec());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name, keyClass, valClass,
                                       new DefaultCodec());
    }
   
    return writer;
  }
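This dispatch is what the classic createWriter overloads do internally: NONE gets a plain Writer, RECORD a RecordCompressWriter, and BLOCK a BlockCompressWriter. In Hadoop 2 and later the same choice is usually expressed through Writer options instead; a sketch using that API:

  SequenceFile.Writer writer = SequenceFile.createWriter(conf,
      SequenceFile.Writer.file(name),
      SequenceFile.Writer.keyClass(IntWritable.class),
      SequenceFile.Writer.valueClass(Text.class),
      // Pick NONE, RECORD or BLOCK here; the codec is ignored for NONE.
      SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DefaultCodec()));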

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, progress, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name,
                                        keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name,
                                       keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    }
   
    return writer;
  }

          } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("Unknown codec: " +
                                               codecClassname, cnfe);
          }
        } else {
          codec = new DefaultCodec();
          ((Configurable)codec).setConf(conf);
        }
      }
     
      this.metadata = new Metadata();
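When no codec class name is configured, the code above falls back to DefaultCodec and wires in the Configuration by casting to Configurable. For the configured-name branch, the usual idiom is ReflectionUtils, which performs that setConf call automatically; a sketch (the class-name string is illustrative):

  Configuration conf = new Configuration();
  Class<? extends CompressionCodec> codecClass =
      conf.getClassByName("org.apache.hadoop.io.compress.DefaultCodec")
          .asSubclass(CompressionCodec.class);
  // ReflectionUtils.newInstance invokes setConf() on Configurable codecs for us.
  CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);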

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass);
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name, keyClass, valClass,
          new DefaultCodec());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name, keyClass, valClass,
          new DefaultCodec());
    }
   
    return writer;
  }

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, progress);
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name,
          keyClass, valClass, new DefaultCodec(), progress);
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name,
          keyClass, valClass, new DefaultCodec(), progress);
    }
   
    return writer;
  }
