Package org.apache.hadoop.io.compress

Examples of org.apache.hadoop.io.compress.DefaultCodec
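DefaultCodec is Hadoop's built-in DEFLATE/zlib codec (default file extension ".deflate"). Before the extracted snippets below, here is a minimal, self-contained round trip through the codec's stream API. The class name and sample payload are illustrative, not taken from the snippets.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionInputStream;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class DefaultCodecRoundTrip {
      public static void main(String[] args) throws Exception {
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(new Configuration()); // DefaultCodec is Configurable; set a conf before use

        byte[] original = "hive and hadoop".getBytes("UTF-8");

        // compress into an in-memory buffer
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        CompressionOutputStream out = codec.createOutputStream(buffer);
        out.write(original);
        out.close(); // finishes the compressed stream

        // decompress and check that the payload survives the round trip
        CompressionInputStream in =
            codec.createInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        byte[] restored = new byte[original.length];
        int off = 0, n;
        while (off < restored.length
            && (n = in.read(restored, off, restored.length - off)) > 0) {
          off += n;
        }
        in.close();
        System.out.println(new String(restored, 0, off, "UTF-8")); // prints "hive and hadoop"
      }
    }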


        "5.3".getBytes("UTF-8"), "howl and hadoop".getBytes("UTF-8"),
        new byte[0], "\\N".getBytes("UTF-8")};

    RCFileOutputFormat.setColumnNumber(conf, 8); // the RCFile carries 8 columns per row
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec()); // compress column data with DefaultCodec (zlib)
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      // wrap each column's raw bytes without copying
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);


        "5.3".getBytes("UTF-8"), "howl and hadoop".getBytes("UTF-8"),
        new byte[0], "\\N".getBytes("UTF-8")};

    RCFileOutputFormat.setColumnNumber(conf, 8);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
View Full Code Here

        "5.3".getBytes("UTF-8"), "hive and hadoop".getBytes("UTF-8"),
        new byte[0], "NULL".getBytes("UTF-8")};

    RCFileOutputFormat.setColumnNumber(conf, expectedFieldsData.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);

      byte[][] fieldsData) throws IOException, SerDeException {
    fs.delete(file, true);

    RCFileOutputFormat.setColumnNumber(conf, fieldsData.length);
    RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
        new DefaultCodec());

    BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
    for (int i = 0; i < fieldsData.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(fieldsData[i], 0,
          fieldsData[i].length);
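The write fragments above all follow the same pattern. For reference, a hedged, self-contained sketch of the complete helper; the method name writeRow is assumed, and the append/close tail is inferred from the RCFile.Writer API rather than taken from the elided part of the snippet:

    // Uses RCFile and RCFileOutputFormat from org.apache.hadoop.hive.ql.io and the
    // BytesRef* classes from org.apache.hadoop.hive.serde2.columnar.
    static void writeRow(FileSystem fs, Configuration conf, Path file,
                         byte[][] fieldsData) throws IOException {
      fs.delete(file, true); // start from a clean file
      RCFileOutputFormat.setColumnNumber(conf, fieldsData.length);
      RCFile.Writer writer = new RCFile.Writer(fs, conf, file, null,
          new DefaultCodec());
      BytesRefArrayWritable bytes = new BytesRefArrayWritable(fieldsData.length);
      for (int i = 0; i < fieldsData.length; i++) {
        bytes.set(i, new BytesRefWritable(fieldsData[i], 0, fieldsData[i].length));
      }
      writer.append(bytes); // buffers the row into the current row group
      writer.close();       // flushes remaining rows and writes the file footer
    }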

      PerformTestRCFileAndSeqFile testcase = new PerformTestRCFileAndSeqFile(
          isLocalFS, file);

      // benchmark parameters; adjust before running
      boolean checkCorrect = true;                 // also verify the data that is read back
      CompressionCodec codec = new DefaultCodec(); // codec under test
      testcase.columnMaxSize = 30;

      // testcase.testWithColumnNumber(count, 2, checkCorrect, codec);
      // testcase.testWithColumnNumber(count, 10, checkCorrect, codec);
      // testcase.testWithColumnNumber(count, 25, checkCorrect, codec);

   
    // choose a writer implementation; RECORD and BLOCK compression default to DefaultCodec
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, null, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name, keyClass, valClass,
                                        new DefaultCodec());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name, keyClass, valClass,
                                       new DefaultCodec());
    }
   
    return writer;
  }

   
    // same selection as above, additionally wiring a Progressable and Metadata through
    if (compressionType == CompressionType.NONE) {
      writer = new Writer(fs, conf, name, keyClass, valClass, progress, new Metadata());
    } else if (compressionType == CompressionType.RECORD) {
      writer = new RecordCompressWriter(fs, conf, name,
                                        keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    } else if (compressionType == CompressionType.BLOCK){
      writer = new BlockCompressWriter(fs, conf, name,
                                       keyClass, valClass, new DefaultCodec(), progress, new Metadata());
    }
   
    return writer;
  }
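Both overloads above fall back to DefaultCodec whenever RECORD or BLOCK compression is requested without an explicit codec. A minimal usage sketch of the public factory; the path and key/value types are placeholders:

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/example.seq"); // placeholder path

    // BLOCK compression groups many records into each compressed chunk
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
        Text.class, IntWritable.class,
        SequenceFile.CompressionType.BLOCK, new DefaultCodec());
    writer.append(new Text("key"), new IntWritable(1));
    writer.close();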

          } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("Unknown codec: " +
                                               codecClassname, cnfe);
          }
        } else {
          // no codec named in the file header: fall back to DefaultCodec
          codec = new DefaultCodec();
          ((Configurable)codec).setConf(conf);
        }
      }
     
      this.metadata = new Metadata();
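The fragment above comes from reading a file header: a codec class named in the header is instantiated reflectively, and DefaultCodec is the fallback when none is recorded. The same lookup pattern, condensed into a self-contained sketch (the class name string is illustrative):

    Configuration conf = new Configuration();
    String codecClassname = "org.apache.hadoop.io.compress.DefaultCodec";
    CompressionCodec codec;
    try {
      Class<?> codecClass = conf.getClassByName(codecClassname);
      // ReflectionUtils.newInstance also calls setConf on Configurable codecs
      codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    } catch (ClassNotFoundException cnfe) {
      throw new IllegalArgumentException("Unknown codec: " + codecClassname, cnfe);
    }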

                  WritableComparator comparator, Class valClass,
                  SequenceFile.CompressionType compress,
                  Progressable progress)
      throws IOException {
      // delegate to the full constructor, defaulting the codec to DefaultCodec
      this(conf, fs, dirName, comparator, valClass,
           compress, new DefaultCodec(), progress);
    }
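This convenience constructor only defaults the codec to DefaultCodec and delegates to the overload that takes one explicitly. A hedged usage sketch of that overload; the directory name and key/value types are placeholders:

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    MapFile.Writer writer = new MapFile.Writer(conf, fs, "/tmp/example.map",
        Text.class, IntWritable.class,
        SequenceFile.CompressionType.RECORD, new DefaultCodec(),
        null); // Progressable; null here means no progress callbacks
    writer.append(new Text("apple"), new IntWritable(1)); // MapFile keys must be appended in sorted order
    writer.close();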

      new RCFile.Writer(fs, conf, file, null,
                        // createMetadata pairs its arguments: apple->block, cat->dog
                        RCFile.createMetadata(new Text("apple"),
                                              new Text("block"),
                                              new Text("cat"),
                                              new Text("dog")),
                        new DefaultCodec());
    BytesRefArrayWritable bytes = new BytesRefArrayWritable(record_1.length);
    for (int i = 0; i < record_1.length; i++) {
      BytesRefWritable cu = new BytesRefWritable(record_1[i], 0,
          record_1[i].length);
      bytes.set(i, cu);
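A hedged read-back sketch for a file like the one written above, using the RCFile.Reader API (variable names are illustrative):

    RCFile.Reader reader = new RCFile.Reader(fs, file, conf);
    LongWritable rowID = new LongWritable();
    BytesRefArrayWritable cols = new BytesRefArrayWritable();
    while (reader.next(rowID)) {  // advance to the next row
      reader.getCurrentRow(cols); // lazily materialize its columns
      // cols.get(i) exposes each column's bytes without copying
    }
    reader.close();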
