
Examples of org.apache.hadoop.io.compress.DefaultCodec



  @Override
  public void open(String filePath, FlumeFormatter fmt) throws IOException {
    // Default to DefaultCodec (DEFLATE) with block compression and
    // delegate to the codec-aware overload.
    DefaultCodec defCodec = new DefaultCodec();
    CompressionType cType = CompressionType.BLOCK;
    open(filePath, defCodec, cType, fmt);
  }
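This Flume-style open(...) overload (and the similar one further down) simply picks DefaultCodec with BLOCK compression and delegates. For a self-contained illustration of the codec itself, here is a minimal sketch that wraps a raw output stream with DefaultCodec; the local file system, path, and payload are placeholders rather than anything from the excerpt above. Note that DefaultCodec is Configurable and must be given a Configuration before use.

    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class DefaultCodecStreamSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);

        // DefaultCodec implements Configurable; configure it before use.
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(conf);

        // getDefaultExtension() is ".deflate" for DefaultCodec.
        Path path = new Path("example" + codec.getDefaultExtension());
        try (OutputStream raw = fs.create(path);
             CompressionOutputStream out = codec.createOutputStream(raw)) {
          out.write("hello, compressed world".getBytes(StandardCharsets.UTF_8));
          out.finish(); // flush the DEFLATE trailer before close
        }
      }
    }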


  protected SequenceFile.Writer createWriter(Path path,
      Class<? extends HLogKey> keyClass, Class<? extends KeyValue> valueClass)
      throws IOException {
    return SequenceFile.createWriter(this.fs, this.conf, path, keyClass,
        valueClass, fs.getConf().getInt("io.file.buffer.size", 4096),
        fs.getDefaultReplication(), this.blocksize,
        SequenceFile.CompressionType.NONE, new DefaultCodec(), null,
        new Metadata());
  }
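The HBase helper above spells out every createWriter parameter, including buffer size, replication, and block size. Where the defaults are acceptable, a shorter overload suffices; the following is a hedged, self-contained sketch using the classic pre-Writer.Option API, with placeholder key/value types and a local path.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class SequenceFileWriteSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path("example.seq");

        // Record-compress each value with DefaultCodec; buffer size,
        // replication, and block size fall back to the fs defaults.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
            LongWritable.class, Text.class,
            SequenceFile.CompressionType.RECORD, new DefaultCodec());
        try {
          writer.append(new LongWritable(1L), new Text("first record"));
        } finally {
          writer.close();
        }
      }
    }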


  @Override
  public void open(String filePath) throws IOException {
    DefaultCodec defCodec = new DefaultCodec();
    CompressionType cType = CompressionType.BLOCK;
    open(filePath, defCodec, cType);
  }

    /**
     * Creates a compressed sequence file.
     * @throws Exception if failed
     */
    @Test
    public void write_compressed() throws Exception {
        DefaultCodec codec = new DefaultCodec();
        codec.setConf(conf);

        Path path = new Path("testing");

        LongWritable key = new LongWritable();
        LongWritable value = new LongWritable();
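The test excerpt cuts off right after declaring its key and value. The original body is not on this page, but a plausible continuation, shown purely as a hedged sketch, would build a block-compressed writer from the configured codec and append a record; this uses the option-based createWriter API available since Hadoop 2.x.

        // Hedged sketch of a continuation, not the original test body.
        SequenceFile.Writer writer = SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(path),
            SequenceFile.Writer.keyClass(LongWritable.class),
            SequenceFile.Writer.valueClass(LongWritable.class),
            SequenceFile.Writer.compression(
                SequenceFile.CompressionType.BLOCK, codec));
        try {
            key.set(100L);
            value.set(200L);
            writer.append(key, value);
        } finally {
            writer.close();
        }
    }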

    @Test
    public void output_compressed() throws Exception {
        LocalFileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path(folder.newFile("testing").toURI());
        ModelOutput<StringOption> out = format.codec(new DefaultCodec())
            .createOutput(StringOption.class, fs, path, new Counter());
        try {
            out.write(new StringOption("Hello, world!"));
        } finally {
            out.close();
        }
    }
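Reading compressed data back requires no codec argument at all: a SequenceFile records the codec class in its header, and the reader instantiates it automatically. A minimal sketch with the generic Hadoop reader (not Asakusa's ModelInput), reading the file written in the earlier write sketch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SequenceFileReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path("example.seq");

        // The codec is discovered from the file header, so compressed and
        // uncompressed files are read the same way.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        try {
          LongWritable key = new LongWritable();
          Text value = new Text();
          while (reader.next(key, value)) {
            System.out.println(key.get() + "\t" + value);
          }
        } finally {
          reader.close();
        }
      }
    }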

    for (int pi = 0; pi < 5; pi++) {
      Path path = new Path(new Path(file.getAbsolutePath()),
          "partition" + pi);

      datePartitionedRowCount += writeRCFileTest(fs, simpleRowCount,
          path, columnCount, new DefaultCodec(), columnCount);

      new File(path.toString()).deleteOnExit();
      datePartitionedRCFiles.add(path.toString());
      datePartitionedDirs.add(file.toString());

        + "/attempt-00000");
      Path hourFilePath = new Path(rcFile.getAbsolutePath());
      rcFile.deleteOnExit();

      writeRCFileTest(fs, simpleRowCount, hourFilePath,
          columnCount, new DefaultCodec(), columnCount);

      yearMonthDayHourcalendar.add(Calendar.HOUR_OF_DAY, 1);
    }

    yearMonthDayHourcalendar.add(Calendar.DAY_OF_MONTH, 1);

    simpleDataFile.deleteOnExit();

    Path path = new Path(simpleDataFile.getPath());

    writeRCFileTest(fs, simpleRowCount, path, columnCount,
        new DefaultCodec(), columnCount);

    // produce a folder of simple data
    simpleDataDir = new File("simpleDataDir" + System.currentTimeMillis());
    simpleDataDir.mkdir();

    for (int i = 0; i < simpleDirFileCount; i++) {

      simpleDataFile = new File(simpleDataDir, "testhiveColumnarLoader-"
          + i + ".txt");

      Path filePath = new Path(simpleDataFile.getPath());

      writeRCFileTest(fs, simpleRowCount, filePath, columnCount,
          new DefaultCodec(), columnCount);
    }
  }
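The Pig test excerpts above all funnel into writeRCFileTest, a helper whose body is not shown on this page. Purely as a hedged sketch of what writing an RCFile with DefaultCodec involves, assuming Hive's org.apache.hadoop.hive.ql.io.RCFile API (the class name, path, and cell contents here are illustrative, not taken from the excerpts):

    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.io.RCFile;
    import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
    import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class RCFileWriteSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path("example.rc");
        int columnCount = 3;

        // RCFile reads its column count from the configuration.
        RCFileOutputFormat.setColumnNumber(conf, columnCount);

        RCFile.Writer writer =
            new RCFile.Writer(fs, conf, path, null, new DefaultCodec());
        try {
          // One row: a BytesRefArrayWritable holding one cell per column.
          BytesRefArrayWritable row = new BytesRefArrayWritable(columnCount);
          for (int col = 0; col < columnCount; col++) {
            byte[] cell = ("value-" + col).getBytes(StandardCharsets.UTF_8);
            row.set(col, new BytesRefWritable(cell, 0, cell.length));
          }
          writer.append(row);
        } finally {
          writer.close();
        }
      }
    }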

              conf.getInt("hbase.regionserver.hlog.replication",
              FSUtils.getDefaultReplication(fs, path))),
            Long.valueOf(conf.getLong("hbase.regionserver.hlog.blocksize",
                FSUtils.getDefaultBlockSize(fs, path))),
            Boolean.valueOf(false) /*createParent*/,
            SequenceFile.CompressionType.NONE, new DefaultCodec(),
            createMetadata(conf, compress)
            });
    } catch (InvocationTargetException ite) {
      // function was properly called, but threw it's own exception
      throw new IOException(ite.getCause());
    } catch (Exception e) {
      // ignore all other exceptions. related to reflection failure
    }

    // if reflection failed, use the old createWriter
    if (this.writer == null) {
      LOG.debug("new createWriter -- HADOOP-6840 -- not available");
      this.writer = SequenceFile.createWriter(fs, conf, path,
        HLog.getKeyClass(conf), WALEdit.class,
        fs.getConf().getInt("io.file.buffer.size", 4096),
        (short) conf.getInt("hbase.regionserver.hlog.replication",
          FSUtils.getDefaultReplication(fs, path)),
        conf.getLong("hbase.regionserver.hlog.blocksize",
          FSUtils.getDefaultBlockSize(fs, path)),
        SequenceFile.CompressionType.NONE,
        new DefaultCodec(),
        null,
        createMetadata(conf, compress));
    } else {
      LOG.debug("using new createWriter -- HADOOP-6840");
    }
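The reflection in this HBase excerpt probes for the SequenceFile.createWriter overload introduced by HADOOP-6840, which added a boolean createParent parameter (visible as Boolean.valueOf(false) in the argument list above). On Hadoop versions without that overload, the reflective lookup fails, this.writer stays null, and the code falls back to the older overload in the if-block.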

          } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("Unknown codec: " +
                                               codecClassname, cnfe);
          }
        } else {
          // No codec configured: fall back to DefaultCodec, making sure it
          // sees the current Configuration before use.
          codec = new DefaultCodec();
          ((Configurable)codec).setConf(conf);
        }
      }
     
      this.metadata = new Metadata();
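This last fragment is the tail of SequenceFile's codec-selection logic: a configured codec class name wins, and DefaultCodec is the fallback. A hedged sketch of the full branch, assuming codecClassname holds the configured class name, as the catch block suggests:

    // Sketch of the surrounding logic; codecClassname and conf come from
    // the reader's context in the original source.
    CompressionCodec codec;
    if (codecClassname != null) {
      try {
        Class<?> codecClass = conf.getClassByName(codecClassname);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
      } catch (ClassNotFoundException cnfe) {
        throw new IllegalArgumentException("Unknown codec: " +
                                           codecClassname, cnfe);
      }
    } else {
      codec = new DefaultCodec();
      ((Configurable) codec).setConf(conf);
    }

Note that ReflectionUtils.newInstance already calls setConf on Configurable instances, which is why only the DefaultCodec fallback needs the explicit cast-and-configure step.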


