Package org.apache.hadoop.io.serializer

Examples of org.apache.hadoop.io.serializer.Serializer


      //
      // Write some data out to a flat file
      //
      FileInputFormat.setInputPaths(job, dir);
      // Create the destination file; 'ds' is the raw output stream the
      // serializer will write into.
      ds = fs.create(file);
      // Obtain a Writable-based serializer directly from the
      // WritableSerialization implementation (bypasses SerializationFactory).
      Serializer serializer = new WritableSerialization()
          .getSerializer(Writable.class);

      // construct some data and write it
      serializer.open(ds);
      // Emit ten records; each payload is "Hello World! <i>" plus the index.
      for (int i = 0; i < 10; i++) {
        serializer.serialize(new RecordTestObj("Hello World! "
            + String.valueOf(i), i));
      }
      // Closing the serializer also finishes/flushes the underlying stream.
      serializer.close();

      //
      // Construct the reader
      //
      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format =
View Full Code Here


  public void write(DataOutput out) throws IOException {
    Text.writeString(out, inputSplitClass.getName());
    Text.writeString(out, inputFormatClass.getName());
    Text.writeString(out, mapperClass.getName());
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer =
          factory.getSerializer(inputSplitClass);
    serializer.open((DataOutputStream)out);
    serializer.serialize(inputSplit);
  }
View Full Code Here

    // Fixed-order header: node index, configuration, then the two class
    // names the reader needs to reconstruct the wrapped objects.
    out.writeInt(nodeIndex);
    conf.write(out);
    Text.writeString(out, inputFormatClass.getName());
    Text.writeString(out, inputSplit.getClass().getName());
    // Serialize the split payload with whatever Serializer is registered
    // for its concrete class.
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer = factory.getSerializer(inputSplit.getClass());
    serializer.open((DataOutputStream) out);
    serializer.serialize(inputSplit);
    // serializer intentionally left open: closing it would close 'out',
    // which the caller owns.
  }
View Full Code Here

      return null;
    }

    // Deep-copy 'obj' by round-tripping it through its registered
    // Serializer/Deserializer pair over in-memory buffers.
    Class klazz = obj.getClass();
    Object out = null;
    Serializer s = serializationFactory.getSerializer(klazz);
    Deserializer ds = serializationFactory.getDeserializer(klazz);

    try {
      s.open(outBuffer);
      ds.open(inBuffer);

      // Reset before serializing so stale bytes from a previous call are
      // not mixed into this object's data.
      outBuffer.reset();
      s.serialize(obj);

      // Point the input buffer at exactly the bytes just produced.
      byte [] data = outBuffer.getData();
      int len = outBuffer.getLength();
      inBuffer.reset(data, len);

      // Passing 'out' (null) asks the deserializer to allocate a fresh
      // instance rather than reuse one.
      out = ds.deserialize(out);

      return out;
    } finally {
      // Best-effort cleanup: a failure while closing must not mask the
      // result (or the original exception) from the try block.
      try {
        s.close();
      } catch (IOException ioe) {
        // ignore this; we're closing.
      }

      try {
View Full Code Here

   * Serializes o by getting a Hadoop Serializer based of the object's class.
   * @param o object to serialize
   * @return the serialized byte[] array.
   */
  private byte[] internalSerialize(Object o) {
    // Look up a Hadoop Serializer keyed on the object's runtime class.
    Serializer serializer = getNewSerializer(o.getClass());

    // Write the object into an in-memory byte array.
    ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
    try {
      serializer.open(byteOs);
      serializer.serialize(o);
      serializer.close();
    } catch (IOException ex) {
      // This should never happen since we're writing to an in-memory ByteArrayOutputStream.
      throw new RuntimeException(ex);
    }
    // Snapshot of exactly the bytes the serializer produced.
    return byteOs.toByteArray();
View Full Code Here

  public void write(DataOutput out) throws IOException {
    Text.writeString(out, inputSplitClass.getName());
    Text.writeString(out, inputFormatFile);
    Text.writeString(out, inputProcessorFile);
    SerializationFactory factory = new SerializationFactory(conf);
    Serializer serializer =
          factory.getSerializer(inputSplitClass);
    serializer.open((DataOutputStream)out);
    serializer.serialize(inputSplit);
  }
View Full Code Here

          continue;
        }
      }

      // At this point we know that both values are not null.
      // Pick the per-field custom serializer, if a serializer table exists.
      Serializer serializer = (serializers == null) ? null : serializers[i];
      int comparison = compareObjects(o1, o2, e.getCustomComparator(), field.getType(), serializer);
      // First non-equal field decides the ordering; negate for DESC order.
      if (comparison != 0) {
        return (e.getOrder() == Order.ASC ? comparison : -comparison);
      }
    }
View Full Code Here

        // Header: input index, the target operators, the number of wrapped
        // splits, and the (single, shared) concrete split class name.
        os.writeInt(inputIndex);
        writeObject(targetOps, os);
        os.writeInt(wrappedSplits.length);
        // All wrapped splits are assumed to share the class of element 0.
        os.writeUTF(wrappedSplits[0].getClass().getName());
        SerializationFactory sf = new SerializationFactory(conf);
        Serializer s =
            sf.getSerializer(wrappedSplits[0].getClass());
        s.open((OutputStream) os);
        for (int i = 0; i < wrappedSplits.length; i++)
        {
            // The correct call sequence for Serializer is, we shall open, then serialize, but we shall not close
            s.serialize(wrappedSplits[i]);
        }
       
    }
View Full Code Here

      case DOUBLE:
      case BYTES:
        record.put(i, obj); //optimistic
        break;
      case OBJECT:
        // OBJECT fields are serialized into a reusable per-field buffer,
        // either with a user-supplied serializer or the Hadoop default.
        Serializer customSer = customSerializers[i];
        DataOutputBuffer buffer = buffers[i];
        buffer.reset();
        if (customSer != null){
          // Open/serialize/close per value on the custom serializer.
          customSer.open(buffer);
          customSer.serialize(obj);
          customSer.close(); //TODO is this safe ?
        } else {
          hadoopSer.ser(obj, buffer);
        }
        //TODO this byteBuffer instances should be cached and reused
        // Wrap only the valid region [0, length) of the backing array.
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer.getData(), 0,buffer.getLength());
View Full Code Here

    // Compare the two tuples field by field in sort-spec order; the first
    // field that differs decides the result.
    for(int i = 0; i < c.getElements().size(); i++) {
      Field field = schema.getField(i);
      SortElement e = c.getElements().get(i);
      // index1/index2 map sort positions to each tuple's field positions.
      Object o1 = w1.get(index1[i]);
      Object o2 = w2.get(index2[i]);
      // Optional per-field serializer used for binary comparison.
      Serializer serializer = (serializers == null) ? null : serializers[i];
      int comparison = compareObjects(o1, o2, e.getCustomComparator(), field.getType(),serializer);
      // Negate the result when this field is sorted descending.
      if(comparison != 0) {
        return(e.getOrder() == Order.ASC ? comparison : -comparison);
      }
    }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.serializer.Serializer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.