Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DataInputBuffer
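
DataInputBuffer is an in-memory DataInput that can be repositioned over a byte array with reset(), which makes it the natural counterpart of DataOutputBuffer for round-tripping Writable objects without touching the filesystem. The snippets below all follow that write/reset/readFields pattern. As a self-contained starting point, here is a minimal sketch (the Text and IntWritable values are placeholders, not taken from any of the projects quoted below):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class DataInputBufferRoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize two Writables into an in-memory output buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);
    new IntWritable(42).write(out);

    // reset() points the input buffer at the bytes just written; no copy is made.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());

    // Read the fields back in the order they were written.
    Text text = new Text();
    text.readFields(in);
    IntWritable number = new IntWritable();
    number.readFields(in);

    System.out.println(text + " " + number.get());   // hello 42
  }
}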


        modelOut.write(object.unwrap());
        modelOut.write(object.unwrap());
        modelOut.write(object.unwrap());
        modelOut.close();

        // re-read the serialized bytes through a DataInputBuffer
        DataInputBuffer input = new DataInputBuffer();
        input.reset(output.getData(), output.getLength());
        ModelInput<Object> modelIn = (ModelInput<Object>) type.getAnnotation(ModelInputLocation.class)
            .value()
            .getDeclaredConstructor(RecordParser.class)
            .newInstance(new TsvParser(new InputStreamReader(input, "UTF-8")));
        ModelWrapper copy = loader.newModel("Primitives");
        modelIn.readTo(copy.unwrap());
        assertThat(object.unwrap(), equalTo(copy.unwrap()));
        assertThat(input.read(), is(-1));
        modelIn.close();
    }
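The test above finishes by asserting that input.read() returns -1, i.e. that the ModelInput consumed every byte the ModelOutput produced. A minimal sketch of that exhaustion check in isolation, using a plain Text record instead of the generated Asakusa model classes:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class ExhaustionCheckSketch {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer output = new DataOutputBuffer();
    new Text("record").write(output);

    DataInputBuffer input = new DataInputBuffer();
    input.reset(output.getData(), output.getLength());

    Text record = new Text();
    record.readFields(input);

    // read() returns -1 once every serialized byte has been consumed,
    // which is a cheap way to assert that nothing was left over.
    System.out.println(input.read() == -1);                        // true
    System.out.println(input.getPosition() == input.getLength());  // true
  }
}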


    dob.writeUTF("org.apache.hadoop.hbase.client.Scan");
    Bytes.writeByteArray(dob, new byte[]{'a'});
    // this is the dynamic protocol name
    dob.writeUTF(protocolName);

    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());

    Exec after = new Exec();
    after.setConf(HBaseConfiguration.create());
    after.readFields(dib);
    // no error thrown
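The HBase snippet builds the serialized form by hand with raw DataOutput calls (writeUTF, Bytes.writeByteArray) before handing the bytes to readFields. The same hand-encoding and decoding can be done symmetrically with a DataInputBuffer; the class and protocol names below are hypothetical placeholders:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class HandEncodedFields {
  public static void main(String[] args) throws IOException {
    // Hand-encode a class name, a length-prefixed byte array, and a protocol name.
    DataOutputBuffer dob = new DataOutputBuffer();
    dob.writeUTF("org.example.SomeClass");      // hypothetical class name
    byte[] row = new byte[] {'a'};
    dob.writeInt(row.length);
    dob.write(row);
    dob.writeUTF("someProtocol");               // hypothetical protocol name

    // Decode the same fields, in the same order, through a DataInputBuffer.
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), dob.getLength());

    System.out.println(dib.readUTF());          // org.example.SomeClass
    byte[] decoded = new byte[dib.readInt()];
    dib.readFully(decoded);
    System.out.println(dib.readUTF());          // someProtocol
  }
}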

    origToken.setMasterKeyId(321);
    origToken.setMaxDate(314);
    origToken.setSequenceNumber(12345);
   
    // clone origToken into newToken
    DataInputBuffer inBuf = new DataInputBuffer();
    DataOutputBuffer outBuf = new DataOutputBuffer();
    origToken.write(outBuf);
    inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
    newToken.readFields(inBuf);
   
    // now test the fields
    assertEquals("alice", newToken.getUser().getUserName());
    assertEquals(new Text("bob"), newToken.getRenewer());
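Cloning a token by writing it to a DataOutputBuffer and reading the bytes back into a fresh instance works for any Writable, not just delegation tokens. A small helper sketch, assuming only the Writable contract (Text serves as a concrete example):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class WritableCopySketch {
  // Copies the state of src into dst by serializing and immediately deserializing.
  static void copy(Writable src, Writable dst) throws IOException {
    DataOutputBuffer outBuf = new DataOutputBuffer();
    src.write(outBuf);

    DataInputBuffer inBuf = new DataInputBuffer();
    inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
    dst.readFields(inBuf);
  }

  public static void main(String[] args) throws IOException {
    Text original = new Text("alice");
    Text clone = new Text();
    copy(original, clone);
    System.out.println(clone);   // alice
  }
}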

      Text renewer, Text realUser) throws IOException {
    TestDelegationTokenIdentifier dtid = new TestDelegationTokenIdentifier(
        owner, renewer, realUser);
    DataOutputBuffer out = new DataOutputBuffer();
    dtid.writeImpl(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    try {
      TestDelegationTokenIdentifier dtid2 =
          new TestDelegationTokenIdentifier();
      dtid2.readFields(in);
      assertTrue(dtid.equals(dtid2));

          SequenceFile.Reader.file(fpath));
      key = ReflectionUtils.newInstance(
          r.getKeyClass().asSubclass(WritableComparable.class), lconf);
      val = ReflectionUtils.newInstance(
          r.getValueClass().asSubclass(Writable.class), lconf);
      inbuf = new DataInputBuffer();
      outbuf = new DataOutputBuffer();
    }
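The fragment above instantiates key and value holders reflectively from the classes recorded in the SequenceFile header and keeps a DataInputBuffer/DataOutputBuffer pair around for raw record handling. A sketch of just the reader setup and a scan loop, assuming fpath names an existing SequenceFile:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.util.ReflectionUtils;

public class SequenceFileScanSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Path fpath = new Path(args[0]);   // path to an existing SequenceFile

    try (SequenceFile.Reader reader =
             new SequenceFile.Reader(conf, SequenceFile.Reader.file(fpath))) {
      // Instantiate key/value holders from the classes recorded in the file header.
      WritableComparable key = ReflectionUtils.newInstance(
          reader.getKeyClass().asSubclass(WritableComparable.class), conf);
      Writable val = ReflectionUtils.newInstance(
          reader.getValueClass().asSubclass(Writable.class), conf);

      while (reader.next(key, val)) {
        System.out.println(key + "\t" + val);
      }
    }
  }
}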

 
  @Test
  public void testReadWriteReplicaState() {
    try {
      DataOutputBuffer out = new DataOutputBuffer();
      DataInputBuffer in = new DataInputBuffer();
      for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState
          .values()) {
        repState.write(out);
        in.reset(out.getData(), out.getLength());
        HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState
            .read(in);
        assertTrue("testReadWrite error !!!", repState == result);
        out.reset();
        in.reset();
      }
    } catch (Exception ex) {
      fail("testReadWrite ex error ReplicaState");
    }
  }
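Both buffer classes are designed for reuse: the loop above calls out.reset() and in.reset() between iterations rather than allocating new buffers. A standalone sketch of the same reuse pattern with IntWritable values:

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class BufferReuseSketch {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    IntWritable restored = new IntWritable();

    for (int i = 0; i < 5; i++) {
      out.reset();                              // rewind the output buffer instead of reallocating
      new IntWritable(i).write(out);

      in.reset(out.getData(), out.getLength()); // repoint the input buffer at the fresh bytes
      restored.readFields(in);
      System.out.println(restored.get());       // prints i
    }
  }
}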

    double[] m = {1.1, 2.2, 3.3};
    Model<?> model = new NormalModel(new DenseVector(m), 3.3);
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new NormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }

    double[] m = {1.1, 2.2, 3.3};
    Model<?> model = new SampledNormalModel(new DenseVector(m), 3.3);
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new SampledNormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }

    double[] s = {3.3, 4.4, 5.5};
    Model<?> model = new AsymmetricSampledNormalModel(new DenseVector(m), new DenseVector(s));
    DataOutputBuffer out = new DataOutputBuffer();
    model.write(out);
    Model<?> model2 = new AsymmetricSampledNormalModel();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    model2.readFields(in);
    assertEquals("models", model.toString(), model2.toString());
  }

    double[] m = {1.1, 2.2, 3.3};
    DirichletCluster<?> cluster = new DirichletCluster(new NormalModel(new DenseVector(m), 4), 10);
    DataOutputBuffer out = new DataOutputBuffer();
    cluster.write(out);
    DirichletCluster<?> cluster2 = new DirichletCluster();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    cluster2.readFields(in);
    assertEquals("count", cluster.getTotalCount(), cluster2.getTotalCount());
    assertNotNull("model null", cluster2.getModel());
    assertEquals("model", cluster.getModel().toString(), cluster2.getModel().toString());
  }
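The Mahout model and cluster tests all depend on the same Writable contract: write(DataOutput) and readFields(DataInput) must serialize and restore every field, after which a DataOutputBuffer/DataInputBuffer round trip reproduces the object. A minimal custom Writable sketch (PointWritable is a made-up class, used only for illustration):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Writable;

public class PointWritable implements Writable {
  private double x;
  private double y;

  public PointWritable() { }                 // no-arg constructor required for deserialization

  public PointWritable(double x, double y) {
    this.x = x;
    this.y = y;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeDouble(x);
    out.writeDouble(y);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    x = in.readDouble();
    y = in.readDouble();
  }

  @Override
  public String toString() {
    return "(" + x + ", " + y + ")";
  }

  public static void main(String[] args) throws IOException {
    PointWritable original = new PointWritable(1.1, 2.2);

    DataOutputBuffer out = new DataOutputBuffer();
    original.write(out);

    PointWritable copy = new PointWritable();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    copy.readFields(in);

    System.out.println(copy);                // (1.1, 2.2)
  }
}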
