Package org.apache.hadoop.io.serializer

Examples of org.apache.hadoop.io.serializer.Deserializer.open()
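Deserializer.open(InputStream) attaches a Deserializer to a stream and must be called before deserialize(); close() releases it when deserialization is finished. The following minimal, self-contained sketch (not one of the collected examples below; the class name DeserializerRoundTrip and the choice of Text are illustrative) shows the full life cycle that the excerpts on this page follow:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.serializer.Deserializer;
    import org.apache.hadoop.io.serializer.SerializationFactory;
    import org.apache.hadoop.io.serializer.Serializer;

    public class DeserializerRoundTrip {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        SerializationFactory factory = new SerializationFactory(conf);

        // Serialize a Text value into an in-memory buffer.
        Serializer<Text> serializer = factory.getSerializer(Text.class);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.open(out);
        serializer.serialize(new Text("hello"));
        serializer.close();

        // open() binds the deserializer to the stream; it must precede deserialize().
        Deserializer<Text> deserializer = factory.getDeserializer(Text.class);
        deserializer.open(new ByteArrayInputStream(out.toByteArray()));
        Text copy = deserializer.deserialize(null); // passing null asks for a new object
        deserializer.close();

        System.out.println(copy); // prints "hello"
      }
    }

Passing null to deserialize() lets the framework create a new instance; passing a reusable object, as most of the examples below do, lets that object be refilled in place.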


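From a readFields(DataInput) implementation: the wrapped InputSplit is instantiated reflectively, and a deserializer opened on the stream fills it in: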
    mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
    inputSplit = (InputSplit) ReflectionUtils
       .newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream)in);
    inputSplit = (InputSplit)deserializer.deserialize(inputSplit);
  }

  private Class<?> readClass(DataInput in) throws IOException {
    String className = Text.readString(in);


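From an InputSplit wrapper that reads an array of wrapped splits; note that a single open() call serves all of the subsequent deserialize() calls: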
        try {
            Class<?> splitClass = conf.getClassByName(splitClassName);
            SerializationFactory sf = new SerializationFactory(conf);
            // The correct call sequence for a Deserializer is open() followed by
            // deserialize(); we must not close() it here, because the underlying
            // stream is still in use.
            Deserializer d = sf.getDeserializer(splitClass);
            d.open((InputStream) is);
            wrappedSplits = new InputSplit[splitLen];
            for (int i = 0; i < splitLen; i++)
            {
                wrappedSplits[i] = (InputSplit)ReflectionUtils.newInstance(splitClass, conf);
                d.deserialize(wrappedSplits[i]);

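From a round-trip test helper that opens a Serializer and a Deserializer on in-memory buffers before serializing an object: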
    Serializer s = serializationFactory.getSerializer(klazz);
    Deserializer ds = serializationFactory.getDeserializer(klazz);

    try {
      s.open(outBuffer);
      ds.open(inBuffer);

      outBuffer.reset();
      s.serialize(obj);

      byte [] data = outBuffer.getData();

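A near-identical readFields() variant; it differs only in interning the class-name string it reads, via StringInterner.weakIntern():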
    mapperClass = (Class<? extends Mapper<?, ?, ?, ?>>) readClass(in);
    inputSplit = (InputSplit) ReflectionUtils
       .newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream)in);
    inputSplit = (InputSplit)deserializer.deserialize(inputSplit);
  }

  private Class<?> readClass(DataInput in) throws IOException {
    String className = StringInterner.weakIntern(Text.readString(in));

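Another readFields() variant, this one reading both the InputFormat and InputSplit classes before instantiating and deserializing the split: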
    inputFormatClass = (Class<? extends InputFormat<?, ?>>) readClass(in);
    Class<? extends InputSplit> inputSplitClass = (Class<? extends InputSplit>) readClass(in);
    inputSplit = (InputSplit) ReflectionUtils.newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream) in);
    inputSplit = (InputSplit) deserializer.deserialize(inputSplit);
  }

  private Class<?> readClass(DataInput in) throws IOException {
    String className = Text.readString(in);

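An example that deserializes into a freshly constructed instance and closes the deserializer when done; getNewDeserializer() is a helper defined in the surrounding project: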
          clazz = writableVersionOfClazz;
        }

        // Now deserialize using the hadoop deserializer.
        Deserializer deserializer = getNewDeserializer(clazz);
        deserializer.open(new ByteArrayInputStream(bytes));
        Object o = clazz.newInstance();
        o = deserializer.deserialize(o);
        deserializer.close();

        if (writableVersionOfClazz != null) {

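A further readFields() variant that reads an input-processor file name from the stream before deserializing the split: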
    inputProcessorFile = Text.readString(in);
    inputSplit = (InputSplit) ReflectionUtils
       .newInstance(inputSplitClass, conf);
    SerializationFactory factory = new SerializationFactory(conf);
    Deserializer deserializer = factory.getDeserializer(inputSplitClass);
    deserializer.open((DataInputStream)in);
    inputSplit = (InputSplit)deserializer.deserialize(inputSplit);
  }

  private Class<?> readClass(DataInput in) throws IOException {
    String className = Text.readString(in);

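From Pangool's tuple deserialization: a per-field custom deserializer is opened on a reusable input buffer, used for one value, and closed again: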
          inputBuffer.reset(buffer.array(), offset, length);
        } else {
          throw new PangoolRuntimeException("Can't convert to OBJECT from instance " + objRecord.getClass());
        }
        if(customDeser != null) {
          customDeser.open(inputBuffer);
          // TODO FIXME: the Avro deserializer shouldn't reuse objects in some cases (UNION?)
          tuple.set(pos, customDeser.deserialize(tuple.get(pos)));
          customDeser.close(); // TODO is this ok?
        } else {
          // No custom deserializer, so fall back to the Hadoop serializers registered in "io.serializations"