Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DataInputBuffer
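DataInputBuffer is a reusable, in-memory DataInput: instead of allocating a new stream per record, you re-point it at a byte array with reset(data, length) and read Writable fields straight out of it. Paired with DataOutputBuffer, it gives the round-trip idiom that nearly every snippet below uses: write a Writable into a DataOutputBuffer, reset a DataInputBuffer over getData()/getLength(), then call readFields. A minimal self-contained sketch of that idiom (the class and values are illustrative only):

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.Text;

    public class RoundTrip {
      public static void main(String[] args) throws Exception {
        Text original = new Text("hello");

        // serialize into an in-memory, growable output buffer
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // point an input buffer at the bytes just written
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());

        // deserialize into a fresh instance
        Text copy = new Text();
        copy.readFields(in);
        System.out.println(copy);   // prints "hello"
      }
    }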


    // build the serialized form of a coprocessor Exec call by hand
    DataOutputBuffer dob = new DataOutputBuffer();
    dob.writeUTF("org.apache.hadoop.hbase.client.Scan");
    Bytes.writeByteArray(dob, new byte[]{'a'});
    // this is the dynamic protocol name
    dob.writeUTF(protocolName);

    // point a DataInputBuffer at the bytes just written
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());

    // deserializing the hand-built bytes into an Exec should succeed
    Exec after = new Exec();
    after.setConf(HBaseConfiguration.create());
    after.readFields(dib);
    // no error thrown
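A detail the snippet above depends on: DataOutputBuffer.getData() returns the raw backing array, which is usually larger than what was actually written, so the companion getLength() must bound the read. A minimal sketch of the distinction:

    DataOutputBuffer dob = new DataOutputBuffer();
    dob.writeUTF("hello");
    byte[] backing = dob.getData();     // backing array; may be over-allocated
    int valid = dob.getLength();        // number of bytes actually written
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(backing, valid);          // read only the valid region
    System.out.println(dib.readUTF());  // prints "hello"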


    origToken.setMasterKeyId(321);
    origToken.setMaxDate(314);
    origToken.setSequenceNumber(12345);

    // clone origToken into newToken by serializing and deserializing it
    DataInputBuffer inBuf = new DataInputBuffer();
    DataOutputBuffer outBuf = new DataOutputBuffer();
    origToken.write(outBuf);
    inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
    newToken.readFields(inBuf);

    // now verify the copied fields
    assertEquals("alice", newToken.getUser().getUserName());
    assertEquals(new Text("bob"), newToken.getRenewer());
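The write/reset/readFields sequence above is a general way to deep-copy any Writable. A hedged sketch of it as a helper (copyWritable is a hypothetical name, not part of Hadoop's API; Hadoop itself ships WritableUtils.clone for the same job):

    public static <T extends Writable> void copyWritable(T src, T dst)
        throws IOException {
      DataOutputBuffer out = new DataOutputBuffer();
      src.write(out);                               // serialize the source
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), 0, out.getLength());  // wrap the written bytes
      dst.readFields(in);                           // overwrite the destination
    }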

      Text renewer, Text realUser) throws IOException {
    TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
        owner, renewer, realUser);
    // serialize the identifier, then read it back into a fresh instance
    DataOutputBuffer out = new DataOutputBuffer();
    dtid.writeImpl(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    try {
      TestDelegationTokenIdentifier dtid2 =
          new TestDelegationTokenIdentifier();
      dtid2.readFields(in);
      // the round trip must preserve equality
      assertTrue(dtid.equals(dtid2));

          SequenceFile.Reader.file(fpath));
      // instantiate key/value holders from the types recorded in the file
      key = ReflectionUtils.newInstance(
          r.getKeyClass().asSubclass(WritableComparable.class), lconf);
      val = ReflectionUtils.newInstance(
          r.getValueClass().asSubclass(Writable.class), lconf);
      // reusable buffers for raw record bytes
      inbuf = new DataInputBuffer();
      outbuf = new DataOutputBuffer();
    }
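For context, the reflected key/val instances above are what a SequenceFile read loop fills in. A minimal sketch of such a loop, assuming fpath names an existing SequenceFile:

    Configuration conf = new Configuration();
    SequenceFile.Reader r =
        new SequenceFile.Reader(conf, SequenceFile.Reader.file(fpath));
    Writable key = ReflectionUtils.newInstance(
        r.getKeyClass().asSubclass(Writable.class), conf);
    Writable val = ReflectionUtils.newInstance(
        r.getValueClass().asSubclass(Writable.class), conf);
    while (r.next(key, val)) {
      // key and val now hold the current record
    }
    r.close();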

 
  @Test
  public void testReadWriteReplicaState() {
    try {
      DataOutputBuffer out = new DataOutputBuffer();
      DataInputBuffer in = new DataInputBuffer();
      for (HdfsServerConstants.ReplicaState repState :
          HdfsServerConstants.ReplicaState.values()) {
        // write the constant, then read it back from the same bytes
        repState.write(out);
        in.reset(out.getData(), out.getLength());
        HdfsServerConstants.ReplicaState result =
            HdfsServerConstants.ReplicaState.read(in);
        assertTrue("read/write should preserve the ReplicaState constant",
            repState == result);
        // clear both buffers for the next constant
        out.reset();
        in.reset();
      }
    } catch (Exception ex) {
      fail("unexpected exception in ReplicaState read/write: " + ex);
    }
  }
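ReplicaState implements its own compact one-byte write/read pair. For enums without a custom wire format, Hadoop's WritableUtils offers a generic alternative that stores the constant's name as a string (so it is not byte-compatible with ReplicaState's own format); a minimal sketch of the same round trip with it:

    DataOutputBuffer out = new DataOutputBuffer();
    WritableUtils.writeEnum(out, HdfsServerConstants.ReplicaState.FINALIZED);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    HdfsServerConstants.ReplicaState state =
        WritableUtils.readEnum(in, HdfsServerConstants.ReplicaState.class);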

    double[] m = {1.1, 2.2, 3.3};
    Model<?> model = new NormalModel(new DenseVector(m), 3.3);
    // serialize the model, then read it back into an empty instance
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new NormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }
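Comparing toString() output, as the model tests here do, checks field values but not that every written byte was consumed. DataInputBuffer exposes getPosition() and getLength(), so the tail of the test above could additionally assert full consumption; a hedged variant:

    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    // a position short of the length means write() and readFields()
    // disagree about the wire format
    assertEquals("unread bytes left", in.getLength(), in.getPosition());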

    double[] m = {1.1, 2.2, 3.3};
    Model<?> model = new SampledNormalModel(new DenseVector(m), 3.3);
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new SampledNormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }

    double[] s = {3.3, 4.4, 5.5};
    // m (the means array) is declared just above this excerpt
    Model<?> model = new AsymmetricSampledNormalModel(new DenseVector(m), new DenseVector(s));
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new AsymmetricSampledNormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }

    double[] m = {1.1, 2.2, 3.3};
    DirichletCluster<?> cluster = new DirichletCluster(new NormalModel(new DenseVector(m), 4), 10);
    DataOutputBuffer out = new DataOutputBuffer();
    cluster.write(out);
    DirichletCluster<?> cluster2 = new DirichletCluster();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    cluster2.readFields(in);
    // the nested model must survive the round trip along with the count
    assertEquals("count", cluster.getTotalCount(), cluster2.getTotalCount());
    assertNotNull("model null", cluster2.getModel());
    assertEquals("model", cluster.getModel().toString(), cluster2.getModel().toString());
  }
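A DirichletCluster carries a Model<?> field whose concrete subclass is unknown at compile time, so readFields must learn the class from the stream itself. A hedged sketch of the usual pattern for such polymorphic Writable fields (PolymorphicHolder is an illustration, not Mahout's actual code):

    public class PolymorphicHolder implements Writable {
      private Writable payload;

      @Override
      public void write(DataOutput out) throws IOException {
        // record the concrete class, then its fields
        out.writeUTF(payload.getClass().getName());
        payload.write(out);
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        try {
          // re-instantiate the recorded class before reading its fields
          payload = Class.forName(in.readUTF())
              .asSubclass(Writable.class).newInstance();
        } catch (ReflectiveOperationException e) {
          throw new IOException(e);
        }
        payload.readFields(in);
      }
    }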

        valTransferBuffer = new BytesWritable();
        // TODO: remember the longest key in a TFile, and use it to replace
        // MAX_KEY_SIZE.
        keyBuffer = new byte[MAX_KEY_SIZE];
        keyDataInputStream = new DataInputBuffer();
        valueBufferInputStream = new ChunkDecoder();
        valueDataInputStream = new DataInputStream(valueBufferInputStream);

        if (beginLocation.compareTo(endLocation) >= 0) {
          currentLocation = new Location(endLocation);
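The scanner above keeps one DataInputBuffer and one reusable key byte[] for its whole lifetime rather than allocating streams per record. The three-argument reset(buf, start, length) is what makes that work: the same DataInputBuffer can be re-pointed at successive regions of a shared array. A minimal sketch:

    // two Text records packed back to back in one buffer
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("key1").write(out);
    int firstEnd = out.getLength();
    new Text("key2").write(out);

    DataInputBuffer in = new DataInputBuffer();
    Text t = new Text();
    in.reset(out.getData(), 0, firstEnd);   // first record's region
    t.readFields(in);                       // t is now "key1"
    in.reset(out.getData(), firstEnd, out.getLength() - firstEnd);
    t.readFields(in);                       // t is now "key2"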
