Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.DefaultCodec


  public TestSequenceFile(String name) { super(name); }

  /** Unit tests for SequenceFile. */
  public void testZlibSequenceFile() throws Exception {
    LOG.info("Testing SequenceFile with DefaultCodec");
    compressedSeqFileTest(new DefaultCodec());
    LOG.info("Successfully tested SequenceFile with DefaultCodec");
  }
View Full Code Here


  public void testSequenceFileMetadata() throws Exception {
    LOG.info("Testing SequenceFile with metadata");
    int count = 1024 * 10;
    int megabytes = 1;
    int factor = 5;
    CompressionCodec codec = new DefaultCodec();
    Path file = new Path(System.getProperty("test.build.data",".")+"/test.seq.metadata");
    Path recordCompressedFile =
      new Path(System.getProperty("test.build.data",".")+"/test.rc.seq.metadata");
    Path blockCompressedFile =
      new Path(System.getProperty("test.build.data",".")+"/test.bc.seq.metadata");
View Full Code Here

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, null, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name, keyClass, valClass,
                                        new DefaultCodec());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name, keyClass, valClass,
                                       new DefaultCodec());
    }
   
    return writer;
  }
View Full Code Here

   
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, progress, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name,
                                        keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name,
                                       keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    }
   
    return writer;
  }
View Full Code Here

                 Class keyClass, Class valClass, CompressionType compressionType)
    throws IOException {
    return createWriter(fs, conf, name, keyClass, valClass,
            fs.getConf().getInt("io.file.buffer.size", 4096),
            fs.getDefaultReplication(), fs.getDefaultBlockSize(),
            compressionType, new DefaultCodec(), null, new Metadata());
  }
View Full Code Here

                 Class keyClass, Class valClass, CompressionType compressionType,
                 Progressable progress) throws IOException {
    return createWriter(fs, conf, name, keyClass, valClass,
            fs.getConf().getInt("io.file.buffer.size", 4096),
            fs.getDefaultReplication(), fs.getDefaultBlockSize(),
            compressionType, new DefaultCodec(), progress, new Metadata());
  }
View Full Code Here

          } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("Unknown codec: " +
                                               codecClassname, cnfe);
          }
        } else {
          codec = new DefaultCodec();
          ((Configurable)codec).setConf(conf);
        }
      }
     
      this.metadata = new Metadata();
View Full Code Here

                  WritableComparator comparator, Class valClass,
                  SequenceFile.CompressionType compress,
                  Progressable progress)
      throws IOException {
      this(conf, fs, dirName, comparator, valClass,
           compress, new DefaultCodec(), progress);
    }
View Full Code Here

        this.writer = SequenceFile.createWriter(this.fs, this.conf, newPath,
          HLogKey.class, HLogEdit.class,
          fs.getConf().getInt("io.file.buffer.size", 4096),
          fs.getDefaultReplication(), this.blocksize,
          SequenceFile.CompressionType.NONE, new DefaultCodec(), null,
          new Metadata());

        LOG.info((oldFile != null?
          "Closed " + oldFile + ", entries=" + this.numEntries + ". ": "") +
          "New log writer: " + FSUtils.getPath(newPath));
View Full Code Here

        "5.3".getBytes("UTF-8"), "howl and hadoop".getBytes("UTF-8"),
        new byte[0], "\\N".getBytes("UTF-8")};

    RCFileOutputFormat.setColumnNumber(conf, 8);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.compress.DefaultCodec

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.