Package org.apache.hadoop.io.serializer

Examples of org.apache.hadoop.io.serializer.Serializer


        os.writeInt(inputIndex);
        writeObject(targetOps, os);
        os.writeInt(wrappedSplits.length);
        os.writeUTF(wrappedSplits[0].getClass().getName());
        SerializationFactory sf = new SerializationFactory(conf);
        Serializer s =
            sf.getSerializer(wrappedSplits[0].getClass());
        s.open((OutputStream) os);
        for (int i = 0; i < wrappedSplits.length; i++)
        {
            // The correct call sequence for Serializer is, we shall open, then serialize, but we shall not close
            s.serialize(wrappedSplits[i]);
        }
       
    }
View Full Code Here


    Text.writeString(out, inputSplit.getClass().getName());
    if (inputSplit instanceof Writable) {
      ((Writable) inputSplit).write(out);
    } else {
      SerializationFactory factory = new SerializationFactory(conf);
      Serializer serializer = factory.getSerializer(inputSplit.getClass());
      serializer.open((OutputStream) out);
      serializer.serialize(inputSplit);
      serializer.close();
    }
  }
View Full Code Here

        os.writeInt(splitIndex);
        os.writeInt(inputIndex);
        writeObject(targetOps, os);
        os.writeUTF(wrappedSplit.getClass().getName());
        SerializationFactory sf = new SerializationFactory(conf);
        Serializer s =
            sf.getSerializer(wrappedSplit.getClass());
        // The correct call sequence for Serializer is, we shall open, then serialize, but we shall not close
        s.open((OutputStream) os);
        s.serialize(wrappedSplit);
       
    }
View Full Code Here

                   
          List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jContext);
          rawSplits = new RawSplit[splits.size()];
          DataOutputBuffer buffer = new DataOutputBuffer();
          SerializationFactory factory = new SerializationFactory(conf);
          Serializer serializer =
            factory.getSerializer(splits.get(0).getClass());
          serializer.open(buffer);
          for (int i = 0; i < splits.size(); i++) {
            buffer.reset();
            serializer.serialize(splits.get(i));
            RawSplit rawSplit = new RawSplit();
            rawSplit.setClassName(splits.get(i).getClass().getName());
            rawSplit.setDataLength(splits.get(i).getLength());
            rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
            rawSplit.setLocations(splits.get(i).getLocations());
View Full Code Here

      return null;
    }

    Class klazz = obj.getClass();
    Object out = getInstance(klazz); // the output object to return.
    Serializer s = serializationFactory.getSerializer(klazz);
    Deserializer ds = serializationFactory.getDeserializer(klazz);

    try {
      s.open(outBuffer);
      ds.open(inBuffer);

      outBuffer.reset();
      s.serialize(obj);

      byte [] data = outBuffer.getData();
      int len = outBuffer.getLength();
      inBuffer.reset(data, len);

      out = ds.deserialize(out);

      return out;
    } finally {
      try {
        s.close();
      } catch (IOException ioe) {
        // ignore this; we're closing.
      }

      try {
View Full Code Here

                   
          List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jContext);
          rawSplits = new RawSplit[splits.size()];
          DataOutputBuffer buffer = new DataOutputBuffer();
          SerializationFactory factory = new SerializationFactory(conf);
          Serializer serializer =
            factory.getSerializer(splits.get(0).getClass());
          serializer.open(buffer);
          for (int i = 0; i < splits.size(); i++) {
            buffer.reset();
            serializer.serialize(splits.get(i));
            RawSplit rawSplit = new RawSplit();
            rawSplit.setClassName(splits.get(i).getClass().getName());
            rawSplit.setDataLength(splits.get(i).getLength());
            rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
            rawSplit.setLocations(splits.get(i).getLocations());
View Full Code Here

  public void write(DataOutput out) throws IOException {
    out.writeInt(nodeIndex);
    bundle.write(out);
    Text.writeString(out, inputSplit.getClass().getName());
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer = factory.getSerializer(inputSplit.getClass());
    serializer.open((DataOutputStream) out);
    serializer.serialize(inputSplit);
  }
View Full Code Here

    Text.writeString(out, inputSplit.getClass().getName());
    if (inputSplit instanceof Writable) {
      ((Writable) inputSplit).write(out);
    } else {
      SerializationFactory factory = new SerializationFactory(conf);
      Serializer serializer = factory.getSerializer(inputSplit.getClass());
      serializer.open((OutputStream) out);
      serializer.serialize(inputSplit);
      serializer.close();
    }
  }
View Full Code Here

        for (int i = 0; i < wrappedSplits.length; i++)
        {
            //find out the index of the split class name
            int index = distinctSplitClassList.indexOf(wrappedSplits[i].getClass().getName());
            os.writeInt(index);
            Serializer s = sf.getSerializer(wrappedSplits[i].getClass());
            //Checks if Serializer is NULL or not before calling open() method on it.
            if (s == null) {
                throw new IllegalArgumentException("Could not find Serializer for class "+wrappedSplits[i].getClass()+". InputSplits must implement Writable.");
            }
            s.open((OutputStream) os);
            // The correct call sequence for Serializer is, we shall open, then serialize, but we shall not close
            s.serialize(wrappedSplits[i]);
        }

    }
View Full Code Here

                   
          List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jContext);
          rawSplits = new RawSplit[splits.size()];
          DataOutputBuffer buffer = new DataOutputBuffer();
          SerializationFactory factory = new SerializationFactory(conf);
          Serializer serializer =
            factory.getSerializer(splits.get(0).getClass());
          serializer.open(buffer);
          for (int i = 0; i < splits.size(); i++) {
            buffer.reset();
            serializer.serialize(splits.get(i));
            RawSplit rawSplit = new RawSplit();
            rawSplit.setClassName(splits.get(i).getClass().getName());
            rawSplit.setDataLength(splits.get(i).getLength());
            rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
            rawSplit.setLocations(splits.get(i).getLocations());
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.serializer.Serializer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.