Package: org.apache.hadoop.io.serializer

Usage examples of the org.apache.hadoop.io.serializer.Serializer.open() method, collected from several open-source projects.


        
        // Guard: SerializationFactory.getSerializer() returns null when no
        // Serialization is registered for the split class, so fail fast with a
        // message naming the offending class.
        if (s == null) {
              throw new IllegalArgumentException("Could not find Serializer for class "+wrappedSplits[0].getClass()+". InputSplits must implement Writable.");
        }       
        s.open((OutputStream) os);
        for (int i = 0; i < wrappedSplits.length; i++)
        {
            // Serializer contract used here: open once, then serialize each
            // split. The serializer is deliberately NOT closed, because closing
            // it could close the underlying stream 'os', which the caller owns.
            s.serialize(wrappedSplits[i]);
        }
View Full Code Here


    // Write the input-format and input-processor file names so the reading
    // side can restore them before deserializing the split itself.
    Text.writeString(out, inputFormatFile);
    Text.writeString(out, inputProcessorFile);
    // Look up a Serializer for the split's class through the pluggable
    // SerializationFactory (driven by 'conf').
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer =
          factory.getSerializer(inputSplitClass);
    // NOTE(review): the serializer is opened on 'out' but never closed here —
    // presumably the caller owns and closes 'out'; confirm in the caller.
    serializer.open((DataOutputStream)out);
    serializer.serialize(inputSplit);
  }

  public Configuration getConf() {
    return conf;
View Full Code Here

      // Fail if the factory could not supply a serializer for this datum type.
      if(ser == null) {
        throw new IOException("Serializer for class " + datum.getClass() + " not found");
      }
      // Cache the serializer so later writes of the same class reuse it.
      serializers.put(datum.getClass(), ser);
    }
    // Full open/serialize/close cycle per datum.
    // NOTE(review): close() after a single serialize() may also close the
    // shared 'output' stream, and sits oddly next to the caching above —
    // verify the Serializer implementations in use tolerate repeated
    // open()/close() on the same instance.
    ser.open(output);
    ser.serialize(datum);
    ser.close();
  }

  /**
 
View Full Code Here

      // Compute the job's logical splits, then serialize each one into a
      // reusable in-memory buffer to capture its raw bytes.
      List<org.apache.hadoop.mapreduce.InputSplit> splits = input.getSplits(jobContext);
      rawSplits = new ArrayList<RawSplit>(splits.size());
      DataOutputBuffer buffer = new DataOutputBuffer();
      SerializationFactory factory = new SerializationFactory(orbConf);
      // NOTE(review): the serializer is chosen from splits.get(0) — this
      // assumes all splits share the same concrete class and that the list is
      // non-empty; confirm getSplits() guarantees both.
      Serializer serializer = factory.getSerializer(splits.get(0).getClass());
      serializer.open(buffer);
      for (int i = 0; i < splits.size(); i++) {
        // reset() rewinds the buffer so each split's bytes start at offset 0.
        buffer.reset();
        serializer.serialize(splits.get(i));
        RawSplit rawSplit = new RawSplit();
        rawSplit.setClassName(splits.get(i).getClass().getName());
View Full Code Here

    // Record the input-format and mapper class names so the reader can
    // reconstruct them before deserializing the split payload.
    Text.writeString(out, inputFormatClass.getName());
    Text.writeString(out, mapperClass.getName());
    // Obtain a Serializer for the split class from the pluggable
    // SerializationFactory (configured via 'conf').
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer =
          factory.getSerializer(inputSplitClass);
    // NOTE(review): opened on 'out' but not closed — presumably the caller
    // owns 'out' and closing here would close it prematurely; confirm.
    serializer.open((DataOutputStream)out);
    serializer.serialize(inputSplit);
  }

  public Configuration getConf() {
    return conf;
View Full Code Here

      // Reuse a previously opened serializer for this type, if cached.
      Serializer serializer = serializers.get( type );

      if( serializer == null )
        {
        // First encounter of this type: create a serializer, bind it to the
        // shared output stream exactly once, and cache it so subsequent
        // values of the same type skip the open() call.
        serializer = tupleSerialization.getNewSerializer( type );
        serializer.open( outputStream );
        serializers.put( type, serializer );
        }

      try
        {
View Full Code Here

      case OBJECT:
        // Serialize an opaque object field into its dedicated per-field buffer.
        Serializer customSer = customSerializers[i];
        DataOutputBuffer buffer = buffers[i];
        // Rewind so this field's bytes start at offset 0.
        buffer.reset();
        if (customSer != null){
          // A custom serializer is registered for this field: run a full
          // open/serialize/close cycle against the in-memory buffer.
          customSer.open(buffer);
          customSer.serialize(obj);
          customSer.close(); //TODO is this safe ? close() may propagate to 'buffer' — verify before reusing it.
        } else {
          // No custom serializer registered: fall back to the generic
          // Hadoop-based serializer.
          hadoopSer.ser(obj, buffer);
        }
View Full Code Here

      throws IOException {
    // Write the split's concrete class name first, so the reader knows which
    // class to instantiate before deserializing the payload bytes.
    Class<? extends InputSplit> clazz = split.getClass().asSubclass(InputSplit.class);
    Text.writeString(out, clazz.getName());
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer = factory.getSerializer(clazz);
    // Wrap 'out' so that a Serializer which calls close() cannot close the
    // real stream; skip the wrap when the caller already passed an
    // UncloseableDataOutputStream.
    serializer.open(out instanceof UncloseableDataOutputStream ? out : new UncloseableDataOutputStream(out));
    serializer.serialize(split);
  }

  public static InputSplit deserializeInputSplit(Configuration conf, DataInputStream in) throws IOException {
    String name = Text.readString(in);
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Corporation. Contact: coftware#gmail.com.