Examples of DataOutputBuffer


Examples of org.apache.hadoop.io.DataOutputBuffer

      // of connections
      synchronized (sendParamsLock) {
        Future senderFuture = SEND_PARAMS_EXECUTOR.submit(new Runnable() {
          @Override
          public void run() {
            DataOutputBuffer d = null;

            synchronized (Connection.this.out) {
              try {
                if (shouldCloseConnection.get()) {
                  return;
                }
                if (LOG.isDebugEnabled()) {
                  LOG.debug(getName() + " sending #" + call.id);
                }

                //for serializing the
                //data to be written
                d = new DataOutputBuffer();
                d.writeInt(call.id);
                call.param.write(d);
                byte[] data = d.getData();
                int dataLength = d.getLength();
                out.writeInt(dataLength);      //first put the data length
                out.write(data, 0, dataLength);//write the data
                out.flush();

              } catch (IOException e) {
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

      throw new IOException("Illegal codec!");
    }
    LOG.info("Created a Codec object of type: " + codecClass);

    // Generate data
    DataOutputBuffer data = new DataOutputBuffer();
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for(int i=0; i < count; ++i) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
     
      key.write(data);
      value.write(data);
    }
    LOG.info("Generated " + count + " records");
   
    // Compress data
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter =
      codec.createOutputStream(compressedDataBuffer);
    DataOutputStream deflateOut =
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    deflateOut.write(data.getData(), 0, data.getLength());
    deflateOut.flush();
    deflateFilter.finish();
    LOG.info("Finished compressing data");
   
    // De-compress data
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
                                 compressedDataBuffer.getLength());
    CompressionInputStream inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn =
      new DataInputStream(new BufferedInputStream(inflateFilter));

    // Check
    DataInputBuffer originalData = new DataInputBuffer();
    originalData.reset(data.getData(), 0, data.getLength());
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    for(int i=0; i < count; ++i) {
      RandomDatum k1 = new RandomDatum();
      RandomDatum v1 = new RandomDatum();
      k1.readFields(originalIn);
      v1.readFields(originalIn);
     
      RandomDatum k2 = new RandomDatum();
      RandomDatum v2 = new RandomDatum();
      k2.readFields(inflateIn);
      v2.readFields(inflateIn);
    }

    // De-compress data byte-at-a-time
    originalData.reset(data.getData(), 0, data.getLength());
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
                                 compressedDataBuffer.getLength());
    inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);

    // Check
    originalIn = new DataInputStream(new BufferedInputStream(originalData));
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

    DataOutputStream fout = null;
    Compressor cmp = CodecPool.getCompressor(codec);
    try {
      fout = new DataOutputStream(codec.createOutputStream(
            fs.create(file, true), cmp));
      final DataOutputBuffer dob = new DataOutputBuffer(REC_SIZE * 4 / 3 + 4);
      int seq = 0;
      while (infLen > 0) {
        rand.nextBytes(b);
        final byte[] b64enc = b64.encode(b); // ensures rand printable, no LF
        dob.reset();
        dob.writeInt(seq);
        System.arraycopy(dob.getData(), 0, b64enc, 0, dob.getLength());
        fout.write(b64enc);
        fout.write('\n');
        ++seq;
        infLen -= b64enc.length;
      }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

      key = ReflectionUtils.newInstance(r.getKeyClass().asSubclass(WritableComparable.class),
                                        getConf());
      val = ReflectionUtils.newInstance(r.getValueClass().asSubclass(Writable.class),
                                        getConf());
      inbuf = new DataInputBuffer();
      outbuf = new DataOutputBuffer();
    }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

   * @param obj the object to serialize
   * @return the encoded string
   * @throws IOException
   */
  private static String encodeWritable(Writable obj) throws IOException {
    DataOutputBuffer buf = new DataOutputBuffer();
    obj.write(buf);
    Base64 encoder = new Base64(0, null, true);
    byte[] raw = new byte[buf.getLength()];
    System.arraycopy(buf.getData(), 0, raw, 0, buf.getLength());
    return encoder.encodeToString(raw);
  }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

        object.set("type_date", new Date(2011, 3, 31));
        object.set("type_datetime", new DateTime(2011, 3, 31, 23, 30, 1));

        Writable writable = (Writable) object.unwrap();

        DataOutputBuffer output = new DataOutputBuffer();
        writable.write(output);

        Writable copy = (Writable) loader.newModel("Primitives").unwrap();
        DataInputBuffer input = new DataInputBuffer();
        input.reset(output.getData(), output.getLength());
        copy.readFields(input);

        assertThat(input.read(), is(-1));
        assertThat(writable, equalTo(copy));
    }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

* Test helpers for writable objects.
*/
public class WritableTestRoot {

    static byte[] ser(Writable writable) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        writable.write(out);
        byte[] results = Arrays.copyOfRange(out.getData(), 0, out.getLength());
        return results;
    }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

        byte[] results = Arrays.copyOfRange(out.getData(), 0, out.getLength());
        return results;
    }

    static byte[] ser(WritableRawComparable writable) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        writable.write(out);
        assertThat(writable.getSizeInBytes(out.getData(), 0), is(out.getLength()));
        byte[] results = Arrays.copyOfRange(out.getData(), 0, out.getLength());
        return results;
    }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

    /**
     * Shorthand factory pairing a {@link Format} with its input string.
     *
     * @param format the format under test
     * @param string the input text for that format
     * @return a new {@code FormatSpec} wrapping the pair
     */
    private FormatSpec spec(Format format, String string) {
        return new FormatSpec(format, string);
    }

    static byte[] ser(WritableRawComparable writable) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        writable.write(out);
        assertThat(writable.getSizeInBytes(out.getData(), 0), is(out.getLength()));
        byte[] results = Arrays.copyOfRange(out.getData(), 0, out.getLength());
        return results;
    }
View Full Code Here

Examples of org.apache.hadoop.io.DataOutputBuffer

        assertThat("パーティションが細かい", sequencialReadAve, greaterThan(500.0));
        assertThat("パーティションが粗い", sequencialReadAve, lessThan(10000.0));
    }

    static byte[] write(Writable writable) {
        DataOutputBuffer buffer = new DataOutputBuffer();
        buffer.reset();
        try {
            writable.write(buffer);
        } catch (IOException e) {
            throw new AssertionError(e);
        }
        return Arrays.copyOf(buffer.getData(), buffer.getLength());
    }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.