Examples of readFields()


Examples of org.apache.phoenix.schema.PTable.readFields()

    private static PTable deserializeTable(byte[] b) {
        ByteArrayInputStream stream = new ByteArrayInputStream(b);
        try {
            DataInputStream input = new DataInputStream(stream);
            PTable table = new PTableImpl();
            table.readFields(input);
            return table;
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            try {
View Full Code Here

Examples of org.apache.phoenix.schema.PTableImpl.readFields()

    private static PTable deserializeTable(byte[] b) {
        ByteArrayInputStream stream = new ByteArrayInputStream(b);
        try {
            DataInputStream input = new DataInputStream(stream);
            PTable table = new PTableImpl();
            table.readFields(input);
            return table;
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            try {
View Full Code Here

Examples of org.apache.phoenix.schema.RowKeySchema.readFields()

        try {
            int size = WritableUtils.readVInt(input);
            boolean isDataTableSalted = size < 0;
            size = Math.abs(size);
            RowKeySchema rowKeySchema = new RowKeySchema();
            rowKeySchema.readFields(input);
            maintainers = Lists.newArrayListWithExpectedSize(size);
            for (int i = 0; i < size; i++) {
                IndexMaintainer maintainer = new IndexMaintainer(rowKeySchema, isDataTableSalted);
                maintainer.readFields(input);
                maintainers.add(maintainer);
View Full Code Here

Examples of org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit.readFields()

            DataInput in = new DataInputStream(inputStream);
            PigSplit anotherSplit = new PigSplit();
            anotherSplit.setConf(conf);

            anotherSplit.readFields(in);

            Assert.assertEquals(700, anotherSplit.getLength());
            checkLocationOrdering(pigSplit.getLocations(), new String[] { "l5",
                    "l1", "l6", "l3", "l4" });
View Full Code Here

Examples of org.apache.pig.data.Tuple.readFields()

        // instantiate each object as a tuple.
        Tuple t1 = mFact.newTuple();
        Tuple t2 = mFact.newTuple();
        try {
            t1.readFields(new DataInputStream(new ByteArrayInputStream(b1, s1, l1)));
            t2.readFields(new DataInputStream(new ByteArrayInputStream(b2, s2, l2)));
        } catch (IOException ioe) {
            mLog.error("Unable to instantiate tuples for comparison: " + ioe.getMessage());
            throw new RuntimeException(ioe.getMessage(), ioe);
        }
View Full Code Here

Examples of org.apache.pig.impl.io.NullablePartitionWritable.readFields()

            wr.write(out);
            byte[] arr = baos.toByteArray();
            ByteArrayInputStream bais = new ByteArrayInputStream(arr);
            DataInputStream in = new DataInputStream(bais);
            NullablePartitionWritable re = new NullablePartitionWritable();
            re.readFields(in);
            assertEquals(re, wr);
        }

        // we are not doing any optimization to remove
        // parts of the "value" which are present in the "key" in this
View Full Code Here

Examples of org.apache.tez.runtime.library.common.shuffle.impl.ShuffleHeader.readFields()

      int responsePartition = -1;
      // Read the shuffle header
      String pathComponent = null;
      try {
        ShuffleHeader header = new ShuffleHeader();
        header.readFields(input);
        pathComponent = header.getMapId();

        srcAttemptId = pathToAttemptMap.get(pathComponent);
        compressedLength = header.getCompressedLength();
        decompressedLength = header.getUncompressedLength();
View Full Code Here

Examples of org.ar.domainspecific.tools.csv.CSVFileReader.readFields()

    ArrayList<ClassTop> map = new ArrayList<ClassTop>();
    String inputDirectory = "../org.ar.domainspecific/output-rank-csv/";
    CSVFileReader in = new CSVFileReader(inputDirectory + ontologyName
        + ".csv", ',');
    // read the first row
    Vector<String> fields = in.readFields();
    // skip the first row and read the second because the first one is the
    // header
    fields = in.readFields();

    String className;
View Full Code Here

Examples of org.lilyproject.mapreduce.RecordIdWritable.readFields()

        // Verify the binary length
        assertEquals(1 /* vint length */ + 1 /* record id type byte */ + "foo".length(), bos.toByteArray().length);

        RecordIdWritable writable2 = new RecordIdWritable();
        writable2.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));

        assertEquals(idGenerator.newRecordId("foo"), writable2.getRecordId());
    }
}
View Full Code Here

Examples of org.olap4cloud.impl.CubeIndexEntry.readFields()

    buf.reset(index, index.length);
    List<CubeIndexEntry> result = new ArrayList<CubeIndexEntry>();
    try {
      while(true) {
        CubeIndexEntry entry = new CubeIndexEntry();
        entry.readFields(buf);
        result.add(entry);
      }
    } catch(EOFException e) {
    }
    return result;
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.