Package org.apache.hadoop.io.serializer

Examples of org.apache.hadoop.io.serializer.SerializationFactory
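
Every excerpt below follows the same pattern: build a SerializationFactory from a Configuration, ask it for a Serializer or Deserializer for a concrete class, open it over a stream, and push records through it. As a minimal, self-contained sketch of that round trip (the class name SerializationFactoryRoundTrip is illustrative; the calls are the stock org.apache.hadoop.io.serializer API):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationFactoryRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // The factory consults the "io.serializations" setting and returns the
    // first Serialization that accepts the class; Writable types such as
    // Text match the default WritableSerialization.
    SerializationFactory factory = new SerializationFactory(conf);

    // Serialize a Text value into an in-memory buffer.
    Serializer<Text> serializer = factory.getSerializer(Text.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serializer.open(out);
    serializer.serialize(new Text("hello"));
    serializer.close();

    // Deserialize it back; passing null asks the deserializer to
    // create a new instance rather than reuse one.
    Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
    deserializer.open(new ByteArrayInputStream(out.toByteArray()));
    Text copy = deserializer.deserialize(null);
    deserializer.close();

    System.out.println(copy); // prints "hello"
  }
}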


From a helper class that serializes an object to a DataOutput through a lazily created, cached SerializationFactory:

   /** Serializes the object to the given DataOutput using the
    * available Hadoop serializations.
    * @throws IOException */
  public static <T> void serialize(Configuration conf, DataOutput out,
      T obj, Class<T> objClass) throws IOException {

    if (serializationFactory == null) {
      serializationFactory = new SerializationFactory(getOrCreateConf(conf));
    }
    Serializer<T> serializer = serializationFactory.getSerializer(objClass);

    ByteBufferOutputStream os = new ByteBufferOutputStream();
    try {
      // ... (excerpt truncated)


The matching helper deserializes: it reads a vint length prefix, then hands the payload bytes to a Deserializer from the same cached factory:

   /** Deserializes the object from the given DataInput using the
    * available Hadoop serializations.
    * @throws IOException */
  public static <T> T deserialize(Configuration conf, DataInput in,
      T obj, Class<T> objClass) throws IOException {
    if (serializationFactory == null) {
      serializationFactory = new SerializationFactory(getOrCreateConf(conf));
    }
    Deserializer<T> deserializer =
        serializationFactory.getDeserializer(objClass);

    int length = WritableUtils.readVInt(in);
    // ... (excerpt truncated)
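
Since both helpers are truncated, here is a self-contained reconstruction of what they appear to do, assuming a plain ByteArrayOutputStream in place of the excerpt's ByteBufferOutputStream and a fresh factory per call instead of the cached field; the vint length framing and the factory lookups follow the excerpts, the rest is a sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationHelpers {

  /** Writes obj as a vint length prefix followed by the serialized bytes. */
  public static <T> void serialize(Configuration conf, DataOutput out,
      T obj, Class<T> objClass) throws IOException {
    Serializer<T> serializer =
        new SerializationFactory(conf).getSerializer(objClass);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    serializer.open(os);
    serializer.serialize(obj);
    serializer.close();
    byte[] bytes = os.toByteArray();
    WritableUtils.writeVInt(out, bytes.length); // length prefix
    out.write(bytes);                           // payload
  }

  /** Reads a vint length prefix, then deserializes that many bytes. */
  public static <T> T deserialize(Configuration conf, DataInput in,
      T obj, Class<T> objClass) throws IOException {
    Deserializer<T> deserializer =
        new SerializationFactory(conf).getDeserializer(objClass);
    byte[] bytes = new byte[WritableUtils.readVInt(in)];
    in.readFully(bytes);
    deserializer.open(new ByteArrayInputStream(bytes));
    T result = deserializer.deserialize(obj); // obj may be reused, or null
    deserializer.close();
    return result;
  }
}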

From a reduce-side values iterator: the constructor (its signature is truncated in this excerpt) obtains key and value deserializers from a fresh factory and opens them over the iterator's input buffers:

                           Progressable reporter)
      throws IOException {
      this.in = in;
      this.comparator = comparator;
      this.reporter = reporter;
      SerializationFactory serializationFactory = new SerializationFactory(conf);
      this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
      this.keyDeserializer.open(keyIn);
      this.valDeserializer = serializationFactory.getDeserializer(valClass);
      this.valDeserializer.open(this.valueIn);
      readNextKey();
      key = nextKey;
      nextKey = null; // force new instance creation
      hasNext = more;
      // ... (excerpt truncated)
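
The iterator nulls out nextKey to force a fresh instance, which works because of Deserializer.deserialize's reuse contract: it may fill and return the instance you pass in, or ignore it and return a new object. A small sketch of that contract, assuming Text records under the default WritableSerialization (class and method names here are illustrative):

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;

public class DeserializerReuse {
  /** Reads two Text records, reusing one instance and forcing a new one. */
  static void readTwo(Configuration conf, InputStream in) throws IOException {
    Deserializer<Text> d =
        new SerializationFactory(conf).getDeserializer(Text.class);
    d.open(in);
    try {
      Text reused = new Text();
      // WritableSerialization fills and returns the instance it is given...
      reused = d.deserialize(reused);
      // ...while passing null yields a brand-new object, which is why the
      // iterator above sets nextKey = null to force a new key instance.
      Text fresh = d.deserialize(null);
      System.out.println(reused + " / " + fresh);
    } finally {
      d.close();
    }
  }
}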

A reduce context constructor does the same wiring, opening both deserializers over a single buffer and priming the first record:

                       ) throws InterruptedException, IOException {
    super(conf, taskid, output, committer, reporter);
    this.input = input;
    this.inputCounter = inputCounter;
    this.comparator = comparator;
    SerializationFactory serializationFactory = new SerializationFactory(conf);
    this.keyDeserializer = serializationFactory.getDeserializer(keyClass);
    this.keyDeserializer.open(buffer);
    this.valueDeserializer = serializationFactory.getDeserializer(valueClass);
    this.valueDeserializer.open(buffer);
    hasMore = input.next();
  }

Inside a map task, a serialized input split is rebuilt from raw bytes: the split's class name travels alongside the bytes, and a Deserializer for that class is looked up at runtime (the excerpt opens mid-statement):

        ReflectionUtils.newInstance(taskContext.getInputFormatClass(), job);
    // rebuild the input split
    org.apache.hadoop.mapreduce.InputSplit split = null;
    DataInputBuffer splitBuffer = new DataInputBuffer();
    splitBuffer.reset(rawSplit.getBytes(), 0, rawSplit.getLength());
    SerializationFactory factory = new SerializationFactory(job);
    Deserializer<? extends org.apache.hadoop.mapreduce.InputSplit>
      deserializer =
        (Deserializer<? extends org.apache.hadoop.mapreduce.InputSplit>)
        factory.getDeserializer(job.getClassByName(splitClass));
    deserializer.open(splitBuffer);
    split = deserializer.deserialize(null);

    org.apache.hadoop.mapreduce.RecordReader<INKEY,INVALUE> input =
      new NewTrackingRecordReader<INKEY,INVALUE>
      // ... (excerpt truncated)

The writing side of the same scheme: a single Serializer, keyed off the runtime class of the first split, serializes every split into a reusable buffer while the class name is recorded on the RawSplit:

                                                 array.length);
    try {
      if (array.length != 0) {
        DataOutputBuffer buffer = new DataOutputBuffer();
        RawSplit rawSplit = new RawSplit();
        SerializationFactory factory = new SerializationFactory(conf);
        Serializer<T> serializer =
          factory.getSerializer((Class<T>) array[0].getClass());
        serializer.open(buffer);
        for (T split : array) {
          rawSplit.setClassName(split.getClass().getName());
          buffer.reset();
          serializer.serialize(split);
          // ... (excerpt truncated)

A close variant that serializes the splits produced by a new-API InputFormat:

              ReflectionUtils.newInstance(jContext.getInputFormatClass(), jContext.getJobConf());

          List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jContext);
          rawSplits = new RawSplit[splits.size()];
          DataOutputBuffer buffer = new DataOutputBuffer();
          SerializationFactory factory = new SerializationFactory(conf);
          Serializer serializer =
            factory.getSerializer(splits.get(0).getClass());
          serializer.open(buffer);
          for (int i = 0; i < splits.size(); i++) {
            buffer.reset();
            serializer.serialize(splits.get(i));
            RawSplit rawSplit = new RawSplit();
            // ... (excerpt truncated)
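
The last three excerpts are the two halves of a class-name-plus-bytes envelope. A minimal self-contained sketch of the pattern, with RawSplit replaced by local variables and conf.getClassByName mirroring job.getClassByName above (ClassNameEnvelope and roundTrip are illustrative names):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class ClassNameEnvelope {
  @SuppressWarnings("unchecked")
  static <T> T roundTrip(Configuration conf, T obj)
      throws IOException, ClassNotFoundException {
    SerializationFactory factory = new SerializationFactory(conf);

    // Write side: remember the concrete class, then serialize the bytes.
    String className = obj.getClass().getName();
    DataOutputBuffer buffer = new DataOutputBuffer();
    Serializer<T> serializer =
        factory.getSerializer((Class<T>) obj.getClass());
    serializer.open(buffer);
    serializer.serialize(obj);
    serializer.close();

    // Read side: resolve the class name, then deserialize the same bytes.
    Class<T> cls = (Class<T>) conf.getClassByName(className);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(buffer.getData(), 0, buffer.getLength());
    Deserializer<T> deserializer = factory.getDeserializer(cls);
    deserializer.open(in);
    T copy = deserializer.deserialize(null);
    deserializer.close();
    return copy;
  }
}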

Setting up the map-side sort buffer's key and value serializers from the configured map output classes, both opened over the same byte stream:

      LOG.info("record buffer = " + softRecordLimit + "/" + kvoffsets.length);
      // k/v serialization
      comparator = job.getOutputKeyComparator();
      keyClass = (Class<K>) job.getMapOutputKeyClass();
      valClass = (Class<V>) job.getMapOutputValueClass();
      serializationFactory = new SerializationFactory(job);
      keySerializer = serializationFactory.getSerializer(keyClass);
      keySerializer.open(bb);
      valSerializer = serializationFactory.getSerializer(valClass);
      valSerializer.open(bb);
      // counters
      // ... (excerpt truncated)
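
Because both serializers write into one stream, the buffer position before and after each serialize call delimits the record. A hedged sketch of that bookkeeping using a plain DataOutputBuffer in place of the excerpt's custom bb stream (RecordBounds and append are illustrative names; the excerpt's real buffer tracks positions with its own indices):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class RecordBounds {
  /** Serializes one key/value pair and reports where each landed. */
  static <K, V> void append(Configuration conf, K key, V value,
      Class<K> keyClass, Class<V> valClass) throws IOException {
    SerializationFactory factory = new SerializationFactory(conf);
    DataOutputBuffer bb = new DataOutputBuffer();

    // Both serializers write into the same stream, as in the excerpt.
    Serializer<K> keySerializer = factory.getSerializer(keyClass);
    Serializer<V> valSerializer = factory.getSerializer(valClass);
    keySerializer.open(bb);
    valSerializer.open(bb);

    int start = bb.getLength();
    keySerializer.serialize(key);
    int keyEnd = bb.getLength();   // key bytes occupy [start, keyEnd)
    valSerializer.serialize(value);
    int valEnd = bb.getLength();   // value bytes occupy [keyEnd, valEnd)
    System.out.println("key " + (keyEnd - start)
        + " bytes, value " + (valEnd - keyEnd) + " bytes");
  }
}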

Finally, from a Tez MapReduce bridge: a SerializationFactory built from the Configuration is handed to a helper that deserializes an InputSplit carried in a protobuf payload:

  public static org.apache.hadoop.mapreduce.InputSplit getNewSplitDetailsFromEvent(
      MRSplitProto splitProto, Configuration conf) throws IOException {
    SerializationFactory serializationFactory = new SerializationFactory(conf);
    return MRInputHelpers.createNewFormatSplitFromUserPayload(
        splitProto, serializationFactory);
  }
