Package org.apache.cassandra.io.util

Examples of org.apache.cassandra.io.util.DataOutputBuffer
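DataOutputBuffer is an in-memory implementation of DataOutput backed by a growable byte array. Callers write through the usual DataOutput methods (or hand the buffer to a serializer), then retrieve the accumulated bytes with getData() and getLength(). Because getData() exposes the internal array, which may be larger than what was actually written, only the first getLength() bytes are meaningful.

A minimal sketch of the round trip that the examples below all build on (assuming only the writeLong/writeInt, getData(), and getLength() calls that appear in those examples):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.cassandra.io.util.DataOutputBuffer;

public class DataOutputBufferRoundTrip
{
    public static void main(String[] args) throws IOException
    {
        DataOutputBuffer out = new DataOutputBuffer();
        out.writeLong(42L); // primitives go through the DataOutput interface
        out.writeInt(7);

        // getData() returns the whole backing array, so pass the explicit
        // offset and length when reading back.
        DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(out.getData(), 0, out.getLength()));

        assert in.readLong() == 42L;
        assert in.readInt() == 7;
    }
}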


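Serializing a command into a Message payload. The serializer writes into the buffer, and Arrays.copyOf trims the backing array to the written length before the bytes are handed to the Message: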
    public Message getMessage() throws IOException
    {
        DataOutputBuffer dob = new DataOutputBuffer();
        serializer.serialize(this, dob);
        return new Message(FBUtilities.getLocalAddress(),
                           StorageService.Verb.RANGE_SLICE,
                           Arrays.copyOf(dob.getData(), dob.getLength()));
    }

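Mixing a column's fields into a MessageDigest. The name and value ByteBuffers are digested directly; the timestamp, expiration mask, and TTL are first staged in a DataOutputBuffer so that their byte encoding is well defined: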
    public void updateDigest(MessageDigest digest)
    {
        digest.update(name.duplicate());
        digest.update(value.duplicate());

        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(timestamp);
            buffer.writeByte(ColumnSerializer.EXPIRATION_MASK);
            buffer.writeInt(timeToLive);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
    }

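Embedding a serialized RowMutation in an Avro migration record. The mutation is written into a DataOutputBuffer and the result is wrapped as a ByteBuffer for the record's row_mutation field: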
        mi.old_version.bytes(UUIDGen.decompose(lastVersion));
        mi.new_version = new org.apache.cassandra.utils.avro.UUID();
        mi.new_version.bytes(UUIDGen.decompose(newVersion));
        mi.classname = new org.apache.avro.util.Utf8(this.getClass().getName());
        // TODO: Avro RowMutation serialization?
        DataOutputBuffer dob = new DataOutputBuffer();
        try
        {
            RowMutation.serializer().serialize(rm, dob);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        mi.row_mutation = ByteBuffer.wrap(dob.asByteArray());

        // sub deflate
        this.subdeflate(mi);

        // serialize

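The super-column variant of the digest pattern: the deletion timestamp is staged through a buffer, then each subcolumn folds its own digest in: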
    public void updateDigest(MessageDigest digest)
    {
        assert name_ != null;
        digest.update(name_.duplicate());
        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(markedForDeleteAt.get());
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
        for (IColumn column : columns_.values())
        {
            column.updateDigest(digest);
        }
    }

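The standard-column variant: the timestamp plus a deletion-mask byte are staged in a buffer before being fed to the digest: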
    public void updateDigest(MessageDigest digest)
    {
        digest.update(name.duplicate());
        digest.update(value.duplicate());

        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(timestamp);
            buffer.writeByte((isMarkedForDelete()) ? ColumnSerializer.DELETION_MASK : 0);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
    }

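A test helper that round-trips a ReadCommand: serialize into a DataOutputBuffer, then deserialize from a ByteArrayInputStream over the valid region of the buffer: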
    private ReadCommand serializeAndDeserializeReadMessage(ReadCommand rm) throws IOException
    {
        ReadCommandSerializer rms = ReadCommand.serializer();
        DataOutputBuffer dos = new DataOutputBuffer();
        ByteArrayInputStream bis;

        rms.serialize(rm, dos);
        bis = new ByteArrayInputStream(dos.getData(), 0, dos.getLength());
        return rms.deserialize(new DataInputStream(bis));
    }

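Round-tripping a one-column ColumnFamily through its serializer and asserting that the metadata and column count survive: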
    {
        ColumnFamily cf;

        cf = ColumnFamily.create("Keyspace1", "Standard1");
        cf.addColumn(column("C", "v", 1));
        DataOutputBuffer bufOut = new DataOutputBuffer();
        ColumnFamily.serializer().serialize(cf, bufOut);

        ByteArrayInputStream bufIn = new ByteArrayInputStream(bufOut.getData(), 0, bufOut.getLength());
        cf = ColumnFamily.serializer().deserialize(new DataInputStream(bufIn));
        assert cf != null;
        assert cf.metadata().cfName.equals("Standard1");
        assert cf.getSortedColumns().size() == 1;
    }

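A larger round trip: a batch of columns is written into a ColumnFamily, serialized, deserialized, and each value compared against the source map: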
            map.put(Integer.toString(i), "Avinash Lakshman is a good man: " + i);
        }

        // write
        cf = ColumnFamily.create("Keyspace1", "Standard1");
        DataOutputBuffer bufOut = new DataOutputBuffer();
        for (String cName : map.navigableKeySet())
        {
            cf.addColumn(column(cName, map.get(cName), 314));
        }
        ColumnFamily.serializer().serialize(cf, bufOut);

        // verify
        ByteArrayInputStream bufIn = new ByteArrayInputStream(bufOut.getData(), 0, bufOut.getLength());
        cf = ColumnFamily.serializer().deserialize(new DataInputStream(bufIn));
        for (String cName : map.navigableKeySet())
        {
            ByteBuffer val = cf.getColumn(ByteBufferUtil.bytes(cName)).value();
            assert new String(val.array(), val.position(), val.remaining()).equals(map.get(cName));

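Serializing a LegacyBloomFilter and verifying that membership checks still hold after deserialization: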
    public static Filter testSerialize(LegacyBloomFilter f) throws IOException
    {
        f.add(ByteBufferUtil.bytes("a"));
        DataOutputBuffer out = new DataOutputBuffer();
        f.serializer().serialize(f, out);

        ByteArrayInputStream in = new ByteArrayInputStream(out.getData(), 0, out.getLength());
        LegacyBloomFilter f2 = f.serializer().deserialize(new DataInputStream(in));

        assert f2.isPresent(ByteBufferUtil.bytes("a"));
        assert !f2.isPresent(ByteBufferUtil.bytes("b"));
        return f2;

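Comparing two compacted rows byte-for-byte: each row is written to its own DataOutputBuffer and flushed to a temporary file, which is then re-read through a MappedFileDataInput so the row size, bloom filter, and index can be checked field by field: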
            AbstractCompactedRow row1 = ci1.next();
            AbstractCompactedRow row2 = ci2.next();
            DataOutputBuffer out1 = new DataOutputBuffer();
            DataOutputBuffer out2 = new DataOutputBuffer();
            row1.write(out1);
            row2.write(out2);

            File tmpFile1 = File.createTempFile("lcrt1", null);
            File tmpFile2 = File.createTempFile("lcrt2", null);

            tmpFile1.deleteOnExit();
            tmpFile2.deleteOnExit();

            new FileOutputStream(tmpFile1).write(out1.getData(), 0, out1.getLength()); // write only the valid bytes of row1
            new FileOutputStream(tmpFile2).write(out2.getData(), 0, out2.getLength()); // write only the valid bytes of row2

            MappedFileDataInput in1 = new MappedFileDataInput(new FileInputStream(tmpFile1), tmpFile1.getAbsolutePath(), 0);
            MappedFileDataInput in2 = new MappedFileDataInput(new FileInputStream(tmpFile2), tmpFile2.getAbsolutePath(), 0);

            // key isn't part of what CompactedRow writes, that's done by SSTW.append

            // row size can differ b/c of bloom filter counts being different
            long rowSize1 = SSTableReader.readRowSize(in1, sstables.iterator().next().descriptor);
            long rowSize2 = SSTableReader.readRowSize(in2, sstables.iterator().next().descriptor);
            assertEquals(out1.getLength(), rowSize1 + 8);
            assertEquals(out2.getLength(), rowSize2 + 8);
            // bloom filter
            IndexHelper.defreezeBloomFilter(in1, rowSize1, false);
            IndexHelper.defreezeBloomFilter(in2, rowSize2, false);
            // index
            int indexSize1 = in1.readInt();
