Package org.apache.cassandra.io

Examples of org.apache.cassandra.io.DataInputBuffer
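The examples below all follow the same basic pattern: bytes are accumulated in a DataOutputBuffer (or arrive as a raw byte[] message body), handed to DataInputBuffer.reset(), and then read back either with DataInput-style calls (readUTF, readInt) or through a type's serializer. A minimal, self-contained sketch of that round trip is shown here; it assumes DataOutputBuffer exposes the usual writeUTF/writeInt methods of a java.io.DataOutputStream, as the excerpts below suggest.

import java.io.IOException;

import org.apache.cassandra.io.DataInputBuffer;
import org.apache.cassandra.io.DataOutputBuffer;

public class DataInputBufferRoundTrip
{
    public static void main(String[] args) throws IOException
    {
        /* Accumulate a key and a length in an in-memory output buffer. */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        bufOut.writeUTF("Table1");
        bufOut.writeInt(42);

        /* Rewind the accumulated bytes into a DataInputBuffer and read them back. */
        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bufOut.getData(), bufOut.getLength());

        String key = bufIn.readUTF();   // "Table1"
        int length = bufIn.readInt();   // 42
        System.out.println(key + " / " + length);
    }
}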


            String file = Table.TableMetadata.getFileName();
            File f = new File(file);
            if ( f.exists() )
            {
                DataOutputBuffer bufOut = new DataOutputBuffer();
                DataInputBuffer bufIn = new DataInputBuffer();
               
                if ( reader_ == null )
                {
                    reader_ = SequenceFile.reader(file);
                }
               
                while ( !reader_.isEOF() )
                {
                    /* Read the metadata info. */
                    reader_.next(bufOut);
                    bufIn.reset(bufOut.getData(), bufOut.getLength());

                    /* The key is the table name */
                    String key = bufIn.readUTF();
                    /* read the size of the data; we ignore this value */
                    bufIn.readInt();
                    tableMetadata_ = Table.TableMetadata.serializer().deserialize(bufIn);
                    break;
                }       
            }           
        }
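Several of the excerpts on this page repeat the same framing when reading a SequenceFile entry: a UTF-encoded key, a four-byte payload size, and then the serialized payload. A small hypothetical helper (not part of the Cassandra API) makes that prefix-skipping step explicit:

/* Hypothetical helper: consume the <UTF key><int size> prefix of an entry
 * and leave bufIn positioned at the start of the serialized payload. */
static String skipEntryHeader(DataInputBuffer bufIn) throws IOException
{
    String key = bufIn.readUTF();   // the entry key, e.g. a table name or row key
    bufIn.readInt();                // payload size in bytes; the callers above ignore it
    return key;
}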


         * receiving end.
         */
        public void doVerb(Message message)
        {
            byte[] body = (byte[])message.getMessageBody()[0];
            DataInputBuffer bufIn = new DataInputBuffer();
            bufIn.reset(body, body.length);
           
            try
            {
                BootstrapInitiateMessage biMsg = BootstrapInitiateMessage.serializer().deserialize(bufIn);
                StreamContextManager.StreamContext[] streamContexts = biMsg.getStreamContext();               

    private void doRecovery(Stack<File> filesNeeded, byte[] header) throws IOException
    {
        Table table = Table.open(table_);

        DataInputBuffer bufIn = new DataInputBuffer();
        DataOutputBuffer bufOut = new DataOutputBuffer();       

        while ( !filesNeeded.isEmpty() )
        {
            File file = filesNeeded.pop();
            // IFileReader reader = SequenceFile.bufferedReader(file.getAbsolutePath(), DatabaseDescriptor.getLogFileSizeThreshold());
            IFileReader reader = SequenceFile.reader(file.getAbsolutePath());
            try
            {
                Map<String, Row> rows = new HashMap<String, Row>();
                reader.readDirect(header);
                /* deserialize the commit log header */
                bufIn.reset(header, 0, header.length);
                CommitLogHeader clHeader = CommitLogHeader.serializer().deserialize(bufIn);
                /* seek to the lowest position */
                int lowPos = CommitLogHeader.getLowestPosition(clHeader);
                /*
                 * If lowPos == 0 then we need to skip the processing of this
                 * file.
                 */
                if (lowPos == 0)
                    break;
                else
                    reader.seek(lowPos);

                /* read the logs, populate RowMutation and apply */
                while ( !reader.isEOF() )
                {
                    bufOut.reset();
                    long bytesRead = reader.next(bufOut);
                    if ( bytesRead == -1 )
                        break;

                    bufIn.reset(bufOut.getData(), bufOut.getLength());
                    /* Skip over the commit log key portion */
                    bufIn.readUTF();
                    /* Skip over data size */
                    bufIn.readInt();
                   
                    /* read the commit log entry */
                    try
                    {                       
                        Row row = Row.serializer().deserialize(bufIn);

        byte[] bytes = new byte[CommitLogHeader.size(Integer.parseInt(args[0]))];
        for ( File file : files )
        {
            CommitLog clog = new CommitLog( file );
            clog.readCommitLogHeader(file.getAbsolutePath(), bytes);
            DataInputBuffer bufIn = new DataInputBuffer();
            bufIn.reset(bytes, 0, bytes.length);
            CommitLogHeader clHeader = CommitLogHeader.serializer().deserialize(bufIn);
            /*
            StringBuilder sb = new StringBuilder("");
            for ( byte b : bytes )
            {

    {
        SSTable ssTable = new SSTable("C:\\Engagements\\Cassandra\\Table-Test-1-Data.db");
        for ( int i = 100; i < 1000; ++i )
        {
            String key = Integer.toString(i);           
            DataInputBuffer bufIn = ssTable.next(key, "Test:C");
            ColumnFamily cf = ColumnFamily.serializer().deserialize(bufIn);
            if ( cf != null )
            {           
                System.out.println("KEY:" + key);
                System.out.println(cf.name());

        fos.close();
       
        FileInputStream fis = new FileInputStream("C:\\Engagements\\bf.dat");
        byte[] bytes = new byte[fis.available()];
        fis.read(bytes);
        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bytes, bytes.length);
        BloomFilter bf2 = BloomFilter.serializer().deserialize(bufIn);
       
        int count = 0;
        for ( int i = 0; i < 1024*1024; ++i )
        {
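The excerpt above sizes its array with FileInputStream.available() and issues a single read(), which is not guaranteed to fill the array. A more defensive sketch (the class and method names here are made up for illustration) reads to end-of-stream before handing the bytes to a DataInputBuffer:

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.cassandra.io.DataInputBuffer;

public class FileToBuffer
{
    /* Read an entire file into memory and wrap it in a DataInputBuffer. */
    public static DataInputBuffer readFully(String path) throws IOException
    {
        FileInputStream fis = new FileInputStream(path);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        int n;
        /* read() may return fewer bytes than requested, so loop until EOF. */
        while ((n = fis.read(chunk)) != -1)
            baos.write(chunk, 0, n);
        fis.close();

        byte[] bytes = baos.toByteArray();
        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bytes, bytes.length);
        return bufIn;
    }
}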

        DataOutputBuffer bufOut = new DataOutputBuffer();
        reader_.next(bufOut);

        if ( bufOut.getLength() > 0 )
        {
            DataInputBuffer bufIn = new DataInputBuffer();
            bufIn.reset(bufOut.getData(), bufOut.getLength());
            /*
             * This buffer contains key and value so we need to strip
             * certain parts
             */
            // read the key
            bufIn.readUTF();
            // read the data length and then deserialize
            bufIn.readInt();
            try
            {
                systemRow_ = Row.serializer().deserialize(bufIn);
            }
            catch ( IOException e )

         * Populate the list of rows from each of the messages
         * Check to see if there is a digest query. If a digest
         * query exists then we need to compare the digest with
         * the digest of the data that is received.
         */
        DataInputBuffer bufIn = new DataInputBuffer();
        for (Message response : responses)
        {
            byte[] body = response.getMessageBody();
            bufIn.reset(body, body.length);
            ReadResponse result = ReadResponse.serializer().deserialize(bufIn);
            if (result.isDigestQuery())
            {
                digest = result.digest();
                isDigestQuery = true;
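The comment at the top of this excerpt describes comparing a replica's digest with the digest of the data that was actually read. A sketch of that final comparison, using only the ReadResponse calls visible above (computeDigest() is a hypothetical helper, not part of Cassandra):

/* Separate the digest response from the data response, then compare. */
byte[] replicaDigest = null;
ReadResponse dataResult = null;
for (Message response : responses)
{
    byte[] body = response.getMessageBody();
    DataInputBuffer bufIn = new DataInputBuffer();
    bufIn.reset(body, body.length);
    ReadResponse result = ReadResponse.serializer().deserialize(bufIn);
    if (result.isDigestQuery())
        replicaDigest = result.digest();
    else
        dataResult = result;
}
boolean consistent = replicaDigest == null
                  || java.util.Arrays.equals(replicaDigest, computeDigest(dataResult));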

  {
        boolean isDataPresent = false;
        for (Message response : responses)
        {
            byte[] body = response.getMessageBody();
            DataInputBuffer bufIn = new DataInputBuffer();
            bufIn.reset(body, body.length);
            try
            {
                ReadResponse result = ReadResponse.serializer().deserialize(bufIn);
                if (!result.isDigestQuery())
                {
                    isDataPresent = true;
                }
                bufIn.close();
            }
            catch (IOException ex)
            {
                logger_.info(LogUtil.throwableToString(ex));
            }

    }

    public static RangeSliceCommand read(Message message) throws IOException
    {
        byte[] bytes = message.getMessageBody();
        DataInputBuffer dib = new DataInputBuffer();
        dib.reset(bytes, bytes.length);
        return serializer.deserialize(new DataInputStream(dib));
    }
