Package org.apache.avro.file

Examples of org.apache.avro.file.CodecFactory
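
CodecFactory selects the compression codec applied to the data blocks of an Avro container file: the static methods nullCodec(), deflateCodec(level), snappyCodec() and fromString(name) each return a factory that is passed to DataFileWriter.setCodec() before the file is created. The snippets below come from projects such as Pig, the Avro command-line tools, and the Avro MapReduce bindings. As orientation, a minimal self-contained sketch of the same pattern (the User schema and file name are illustrative only) might look like this:

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class CodecFactoryExample {
  public static void main(String[] args) throws IOException {
    // Illustrative record schema; any schema works the same way.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
        + "[{\"name\":\"name\",\"type\":\"string\"}]}");

    GenericRecord user = new GenericData.Record(schema);
    user.put("name", "alice");

    DataFileWriter<GenericRecord> writer =
        new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>(schema));
    // The codec must be set before create(); deflate takes an explicit level,
    // other codecs are obtained by name, e.g. CodecFactory.fromString("snappy").
    writer.setCodec(CodecFactory.deflateCodec(6));
    writer.create(schema, new File("users.avro"));
    writer.append(user);
    writer.close();
  }
}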


        // Create a writer for Pig tuples, then pick a compression codec from the
        // job configuration when compressed output has been requested.
        DataFileWriter<Object> writer = new DataFileWriter<Object>(new PigAvroDatumWriter(schema));

        if (FileOutputFormat.getCompressOutput(context)) {
            int level = conf.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
            String codecName = conf.get(OUTPUT_CODEC, DEFLATE_CODEC);
            CodecFactory factory = codecName.equals(DEFLATE_CODEC)
                ? CodecFactory.deflateCodec(level)
                : CodecFactory.fromString(codecName);
            writer.setCodec(factory);
        }
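
The deflate-versus-fromString branching above recurs in nearly every snippet on this page: deflate is the only codec whose factory method takes a compression level, so it is special-cased, and any other configured name is resolved through CodecFactory.fromString(). A hypothetical helper that captures the choice (codecFor is not part of any of these projects) could look like:

    // Sketch only; the DEFLATE_CODEC constant used above resolves to "deflate" in Avro.
    static CodecFactory codecFor(String codecName, int deflateLevel) {
      return "deflate".equals(codecName)
          ? CodecFactory.deflateCodec(deflateLevel)
          : CodecFactory.fromString(codecName);
    }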


    // Resolve the codec name from the command-line option, defaulting to deflate,
    // then build the matching CodecFactory.
    String codecName = opts.hasArgument(codec)
      ? codec.value(opts)
      : DEFLATE_CODEC;
    CodecFactory codecFactory = codecName.equals(DEFLATE_CODEC)
      ? CodecFactory.deflateCodec(compressionLevel)
      : CodecFactory.fromString(codecName);

    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
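
The fragment above appears to be from one of the Avro command-line tools: it parses the codec option and then opens the input and output streams. In the same spirit, a hedged sketch of re-writing an existing container file with a different codec (the recodec method name and GenericRecord types are illustrative; imports from java.io, org.apache.avro.file and org.apache.avro.generic are omitted for brevity):

    // Sketch: copy every record from src to dst, re-compressing with codecName
    // (e.g. "snappy", which needs the Snappy library on the classpath at runtime).
    static void recodec(File src, File dst, String codecName) throws IOException {
      DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
          new FileInputStream(src), new GenericDatumReader<GenericRecord>());
      DataFileWriter<GenericRecord> writer =
          new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>());
      writer.setCodec(CodecFactory.fromString(codecName));
      writer.create(reader.getSchema(), dst);
      for (GenericRecord record : reader) {
        writer.append(record);
      }
      writer.close();
      reader.close();
    }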

  // Configure compression on the DataFileWriter from the JobConf: deflate honours
  // the configured level, while any other codec name is resolved by name.
  static <T> void configureDataFileWriter(DataFileWriter<T> writer,
      JobConf job) throws UnsupportedEncodingException {
    if (FileOutputFormat.getCompressOutput(job)) {
      int level = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
      String codecName = job.get(AvroJob.OUTPUT_CODEC, DEFLATE_CODEC);
      CodecFactory factory = codecName.equals(DEFLATE_CODEC)
        ? CodecFactory.deflateCodec(level)
        : CodecFactory.fromString(codecName);
      writer.setCodec(factory);
    }
  }
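
configureDataFileWriter() only reads settings; the job that wants compressed Avro output still has to provide them. A hedged sketch of the driver-side configuration this method expects (the "snappy" value and level 7 are illustrative):

    JobConf job = new JobConf();
    // Turn on compressed output and name the codec the writer will resolve above.
    FileOutputFormat.setCompressOutput(job, true);
    job.set(AvroJob.OUTPUT_CODEC, "snappy");               // any name CodecFactory.fromString accepts
    job.setInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, 7);     // only consulted when the codec is deflate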

public class TestAvroKeyRecordWriter {
  @Test
  public void testWrite() throws IOException {
    Schema writerSchema = Schema.create(Schema.Type.INT);
    CodecFactory compressionCodec = CodecFactory.nullCodec();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    TaskAttemptContext context = createMock(TaskAttemptContext.class);

    replay(context);
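
The record-writer tests here pass CodecFactory.nullCodec(), which leaves the data blocks uncompressed and keeps the in-memory container files easy to inspect. A minimal round trip in the same spirit, using only avro-core classes (names are illustrative):

    Schema writerSchema = Schema.create(Schema.Type.INT);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

    DataFileWriter<Integer> writer =
        new DataFileWriter<Integer>(new GenericDatumWriter<Integer>(writerSchema));
    writer.setCodec(CodecFactory.nullCodec());   // "null" codec: no compression
    writer.create(writerSchema, outputStream);
    writer.append(1);
    writer.append(2);
    writer.close();

    DataFileStream<Integer> reader = new DataFileStream<Integer>(
        new ByteArrayInputStream(outputStream.toByteArray()),
        new GenericDatumReader<Integer>(writerSchema));
    for (int value : reader) {
      System.out.println(value);   // prints 1, then 2
    }
    reader.close();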

    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
    AvroValue<TextStats> avroValue = new AvroValue<TextStats>(null);
    @SuppressWarnings("unchecked")
    AvroDatumConverter<AvroValue<TextStats>, ?> valueConverter
        = factory.create((Class<AvroValue<TextStats>>) avroValue.getClass());
    CodecFactory compressionCodec = CodecFactory.nullCodec();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

    // Use a writer to generate an Avro container file in memory.
    // Write two records: <'apple', TextStats('apple')> and <'banana', TextStats('banana')>.
    AvroKeyValueRecordWriter<Text, AvroValue<TextStats>> writer

        codec = SNAPPY_CODEC;
      } else {
        codec = "null";
      }
      int level = conf.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
      CodecFactory factory = codec.equals(DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codec);
      writer.setCodec(factory);
    }
    writer.setSyncInterval(conf.getInt(AvroOutputFormat.SYNC_INTERVAL_KEY, DEFAULT_SYNC_INTERVAL));

    Configuration conf = context.getConfiguration();
    if (conf.getBoolean("mapred.output.compress", false)) {
      String codec = conf.get("mapred.output.compression");
      int level = conf.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY,
          AvroOutputFormat.DEFAULT_DEFLATE_LEVEL);
      CodecFactory factory = codec.equals(DEFLATE_CODEC)
          ? CodecFactory.deflateCodec(level)
          : CodecFactory.fromString(codec);
      writer.setCodec(factory);
    }
    writer.setSyncInterval(conf.getInt(AvroOutputFormat.SYNC_INTERVAL_KEY,
        DEFAULT_SYNC_INTERVAL));
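
Both snippets above finish by calling setSyncInterval(). The sync interval is roughly how many bytes of serialized data the writer buffers before closing a block and emitting a sync marker; larger values give the codec bigger blocks to compress, smaller values create more split points for MapReduce. As a hedged illustration (the 1 MiB figure is arbitrary):

    // Buffer roughly 1 MiB per block before compressing and syncing.
    writer.setSyncInterval(1024 * 1024);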

    dataFileWriter = new DataFileWriter<Object>(writer);

    dataFileWriter.setSyncInterval(syncIntervalBytes);

    try {
      CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
      dataFileWriter.setCodec(codecFactory);
    } catch (AvroRuntimeException e) {
      logger.warn("Unable to instantiate avro codec with name (" +
          compressionCodec + "). Compression disabled. Exception follows.", e);
    }
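
CodecFactory.fromString() throws AvroRuntimeException for codec names it does not recognise, which is exactly what the catch block above relies on to fall back to uncompressed output. A hypothetical early-validation helper built on the same behaviour (the name isValidCodecName is illustrative):

    // Sketch: probe a configured codec name up front so that a typo fails fast
    // instead of silently disabling compression at write time.
    static boolean isValidCodecName(String codecName) {
      try {
        CodecFactory.fromString(codecName);
        return true;
      } catch (AvroRuntimeException e) {
        return false;
      }
    }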

public class TestAvroKeyRecordWriter {
  @Test
  public void testWrite() throws IOException {
    Schema writerSchema = Schema.create(Schema.Type.INT);
    GenericData dataModel = new ReflectData();
    CodecFactory compressionCodec = CodecFactory.nullCodec();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    TaskAttemptContext context = createMock(TaskAttemptContext.class);

    replay(context);

