Package org.apache.hadoop.io.SequenceFile

Examples of org.apache.hadoop.io.SequenceFile.CompressionType
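CompressionType selects how a SequenceFile is compressed: NONE writes records uncompressed, RECORD compresses each value individually, and BLOCK buffers many records and compresses them together. Before the project excerpts below, here is a minimal, hypothetical sketch of writing a block-compressed file with the SequenceFile.Writer.Option API; the path and key/value classes are placeholders chosen for illustration, not taken from any of the quoted projects.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class BlockCompressedSequenceFileExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path path = new Path("/tmp/example.seq"); // placeholder path

        DefaultCodec codec = new DefaultCodec();
        codec.setConf(conf); // DefaultCodec is Configurable; give it the configuration

        // BLOCK compression batches many records and compresses them together,
        // which usually compresses better than per-record (RECORD) compression.
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(LongWritable.class),
                SequenceFile.Writer.valueClass(Text.class),
                SequenceFile.Writer.compression(CompressionType.BLOCK, codec))) {
            writer.append(new LongWritable(1L), new Text("hello"));
        }
    }
}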


  }

  @Override
  public void open(String filePath, FlumeFormatter fmt) throws IOException {
    DefaultCodec defCodec = new DefaultCodec();
    CompressionType cType = CompressionType.BLOCK;
    open(filePath, defCodec, cType, fmt);
  }


  }

  @Override
  public void open(String filePath) throws IOException {
    DefaultCodec defCodec = new DefaultCodec();
    CompressionType cType = CompressionType.BLOCK;
    open(filePath, defCodec, cType);
  }

        // get the path of the temporary output file
        Path file = FileOutputFormat.getTaskOutputPath(job, name);

        FileSystem fs = file.getFileSystem(job);
        CompressionType compressionType = CompressionType.BLOCK;
        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(job, DefaultCodec.class);
        CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, job);

        // set the schema metadata
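The excerpt above reads the codec class out of the job configuration with getOutputCompressorClass(job, DefaultCodec.class). The matching settings are normally written on the submitting side; here is a small sketch using the org.apache.hadoop.mapred API (the class name is illustrative, and the choice of BLOCK plus DefaultCodec is just one possible configuration).

import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

public class CompressedOutputJobSetup {

    // Configure a job so that code like the excerpt above finds BLOCK
    // compression and the DefaultCodec when it calls
    // getOutputCompressorClass(...) and reads the compression type.
    public static void configureCompression(JobConf job) {
        FileOutputFormat.setCompressOutput(job, true);
        SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
        FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    }
}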

                working.add(factory.createModelObject());
            }

            // Get the compression settings
            String strCompType = ConfigurationLoader.getProperty(Constants.PROP_KEY_IMP_SEQ_FILE_COMP_TYPE);
            CompressionType compType = getCompType(strCompType);

            // Write the output on a separate thread via a Writer
            if (compType == CompressionType.NONE) {
                output = TemporaryStorage.openOutput(conf, targetTableModel, new Path(dfsFilePath), null);
            } else {

     * Gets the compression type.
     * @param strCompType string representation of the CompressionType
     * @return CompressionType
     */
    protected CompressionType getCompType(String strCompType) {
        CompressionType compType = null;
        try {
            compType = CompressionType.valueOf(strCompType);
        } catch (Exception e) {
            compType = CompressionType.NONE;
            LOG.warn("TG-EXTRACTOR-02004", strCompType);
        }
        return compType;
    }
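The try/catch in getCompType is needed because CompressionType is a plain Java enum: valueOf accepts only the exact constant names (NONE, RECORD, BLOCK) and throws IllegalArgumentException for anything else. A small standalone illustration of that behavior:

import org.apache.hadoop.io.SequenceFile.CompressionType;

public class CompressionTypeValueOfDemo {

    public static void main(String[] args) {
        // Exact constant names parse as expected.
        System.out.println(CompressionType.valueOf("BLOCK")); // prints BLOCK

        // Any other string throws, which is why getCompType falls back to NONE.
        try {
            CompressionType.valueOf("DUMMY");
        } catch (IllegalArgumentException e) {
            System.out.println("Unknown type, defaulting to " + CompressionType.NONE);
        }
    }
}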

    * @throws Exception
    */
    @Test
    public void getCompType01() throws Exception {
        DfsFileImport fileImport = new DfsFileImport();
        CompressionType compType = fileImport.getCompType("NONE");
        assertEquals(CompressionType.NONE, compType);
    }

    * @throws Exception
    */
    @Test
    public void getCompType02() throws Exception {
        DfsFileImport fileImport = new DfsFileImport();
        CompressionType compType = fileImport.getCompType("BLOCK");
        assertEquals(CompressionType.BLOCK, compType);
    }

    * @throws Exception
    */
    @Test
    public void getCompType03() throws Exception {
        DfsFileImport fileImport = new DfsFileImport();
        CompressionType compType = fileImport.getCompType("RECORD");
        assertEquals(CompressionType.RECORD, compType);
    }

    * @throws Exception
    */
    @Test
    public void getCompType04() throws Exception {
        DfsFileImport fileImport = new DfsFileImport();
        CompressionType compType = fileImport.getCompType("DUMMY");
        assertEquals(CompressionType.NONE, compType);
    }

    throws IOException {

    Path file = new Path(job.getOutputPath(), name);
    FileSystem fs = file.getFileSystem(job);
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(job)) {
      // find the kind of compression to do
      compressionType = SequenceFile.getCompressionType(job);

      // find the right codec
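The last excerpt stops just before the codec is instantiated and the writer is opened. In the classic org.apache.hadoop.mapred SequenceFileOutputFormat the method continues roughly as in the sketch below; the helper name is hypothetical and the code illustrates the pattern rather than reproducing the quoted project's exact source.

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceWriterSketch {

    // Hypothetical helper mirroring the excerpt: resolve the compression
    // settings from the job and open a SequenceFile.Writer for the task output.
    static SequenceFile.Writer openTaskWriter(JobConf job, String name, Progressable progress)
            throws IOException {
        Path file = FileOutputFormat.getTaskOutputPath(job, name);
        FileSystem fs = file.getFileSystem(job);

        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (FileOutputFormat.getCompressOutput(job)) {
            // find the kind of compression to do
            compressionType = SequenceFileOutputFormat.getOutputCompressionType(job);
            // find the right codec
            Class<? extends CompressionCodec> codecClass =
                    FileOutputFormat.getOutputCompressorClass(job, DefaultCodec.class);
            codec = ReflectionUtils.newInstance(codecClass, job);
        }

        // Older createWriter overload that takes the compression type and codec directly.
        return SequenceFile.createWriter(fs, job, file,
                job.getOutputKeyClass(), job.getOutputValueClass(),
                compressionType, codec, progress);
    }
}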
