Package org.apache.cassandra.io

Examples of org.apache.cassandra.io.DataOutputBuffer


        }

        public CommitLog.CommitLogContext call() throws Exception
        {
            long currentPosition = -1L;
            DataOutputBuffer cfBuffer = new DataOutputBuffer();
            try
            {
                /* serialize the row */
                Row.serializer().serialize(row, cfBuffer);
                currentPosition = logWriter_.getFilePointer();
                CommitLogContext cLogCtx = new CommitLogContext(logFile_, currentPosition);
                /* Update the header */
                maybeUpdateHeader(row);
                logWriter_.writeLong(cfBuffer.getLength());
                logWriter_.write(cfBuffer.getData(), 0, cfBuffer.getLength());
                maybeRollLog();
                return cLogCtx;
            }
            catch (IOException e)
            {
View Full Code Here


            while ( (line = bufReader.readLine()) != null )
            {
                /* After accumulating count_ keys reset the bloom filter. */
                if ( keyCount > 0 && keyCount % count_ == 0 )
                {                      
                    DataOutputBuffer bufOut = new DataOutputBuffer();
                    BloomFilter.serializer().serialize(bf, bufOut);
                    System.out.println("Finished serializing the bloom filter");
                    buffers.add(bufOut);
                    bf = new BloomFilter(count_, 8);
                }
                line = line.trim();               
                bf.add(line);
                ++keyCount;
            }
        }
       
        /* Add the bloom filter assuming the last one was left out */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        BloomFilter.serializer().serialize(bf, bufOut);
        buffers.add(bufOut);
       
       
        int size = buffers.size();
        for ( int i = 0; i < size; ++i )
        {
            DataOutputBuffer buffer = buffers.get(i);
            String file = args[1] + File.separator + "Bloom-Filter-" + i + ".dat";
            RandomAccessFile raf = new RandomAccessFile(file, "rw");
            raf.write(buffer.getData(), 0, buffer.getLength());
            raf.close();
            buffer.close();
        }
        System.out.println("Done writing the bloom filter to disk");
    }
View Full Code Here

        this.maxResults = maxResults;
    }

    public Message getMessage() throws IOException
    {
        DataOutputBuffer dob = new DataOutputBuffer();
        serializer.serialize(this, dob);
        return new Message(StorageService.getLocalStorageEndPoint(),
                           StorageService.readStage_,
                           StorageService.rangeVerbHandler_,
                           Arrays.copyOf(dob.getData(), dob.getLength()));
    }
View Full Code Here

        }
    }
    public static Message createMessage(String Keyspace, String Key, String CFName, List<ColumnFamily> ColumnFamiles)
    {
        ColumnFamily baseColumnFamily;
        DataOutputBuffer bufOut = new org.apache.cassandra.io.DataOutputBuffer();
        RowMutation rm;
        Message message;
        Column column;

        /* Get the first column family from list, this is just to get past validation */
        baseColumnFamily = new ColumnFamily(CFName, "Standard",DatabaseDescriptor.getComparator(Keyspace, CFName), DatabaseDescriptor.getSubComparator(Keyspace, CFName));
       
        for(ColumnFamily cf : ColumnFamiles) {
            bufOut.reset();
            try
            {
                ColumnFamily.serializer().serializeWithIndexes(cf, bufOut);
                byte[] data = new byte[bufOut.getLength()];
                System.arraycopy(bufOut.getData(), 0, data, 0, bufOut.getLength());

                column = new Column(cf.name().getBytes("UTF-8"), data, 0, false);
                baseColumnFamily.addColumn(column);
            }
            catch (IOException e)
View Full Code Here

        Token mid = part.midpoint(min, min);
        validator.prepare();

        // add a row with the minimum token
        validator.add(new CompactedRow(new DecoratedKey(min, "nonsense!"),
                                       new DataOutputBuffer()));

        // and a row after it
        validator.add(new CompactedRow(new DecoratedKey(mid, "inconceivable!"),
                                       new DataOutputBuffer()));
        validator.complete();

        // confirm that the tree was validated
        assert null != validator.tree.hash(new Range(min, min));
    }
View Full Code Here

    public void testExportSimpleCf() throws IOException
    {
        File tempSS = createTemporarySSTable("Keyspace1", "Standard1");
        ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Standard1");
        IPartitioner<?> partitioner = DatabaseDescriptor.getPartitioner();
        DataOutputBuffer dob = new DataOutputBuffer();
        SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2, partitioner);
       
        // Add rowA
        cfamily.addColumn(new QueryPath("Standard1", null, "colA".getBytes()), "valA".getBytes(), 1, false);
        ColumnFamily.serializer().serializeWithIndexes(cfamily, dob);
        writer.append(partitioner.decorateKey("rowA"), dob);
        dob.reset();
        cfamily.clear();
       
        // Add rowB
        cfamily.addColumn(new QueryPath("Standard1", null, "colB".getBytes()), "valB".getBytes(), 1, false);
        ColumnFamily.serializer().serializeWithIndexes(cfamily, dob);
        writer.append(partitioner.decorateKey("rowB"), dob);
        dob.reset();
        cfamily.clear();
    
        SSTableReader reader = writer.closeAndOpenReader(0);
       
        // Export to JSON and verify
View Full Code Here

    public void testExportSuperCf() throws IOException
    {
        File tempSS = createTemporarySSTable("Keyspace1", "Super4");
        ColumnFamily cfamily = ColumnFamily.create("Keyspace1", "Super4");
        IPartitioner<?> partitioner = DatabaseDescriptor.getPartitioner();
        DataOutputBuffer dob = new DataOutputBuffer();
        SSTableWriter writer = new SSTableWriter(tempSS.getPath(), 2, partitioner);
       
        // Add rowA
        cfamily.addColumn(new QueryPath("Super4", "superA".getBytes(), "colA".getBytes()), "valA".getBytes(), 1, false);
        ColumnFamily.serializer().serializeWithIndexes(cfamily, dob);
        writer.append(partitioner.decorateKey("rowA"), dob);
        dob.reset();
        cfamily.clear();
       
        // Add rowB
        cfamily.addColumn(new QueryPath("Super4", "superB".getBytes(), "colB".getBytes()), "valB".getBytes(), 1, false);
        ColumnFamily.serializer().serializeWithIndexes(cfamily, dob);
        writer.append(partitioner.decorateKey("rowB"), dob);
        dob.reset();
        cfamily.clear();
    
        SSTableReader reader = writer.closeAndOpenReader(0);
       
        // Export to JSON and verify
View Full Code Here

                CommitLogContext cLogCtx = new CommitLogContext(logFile_, currentPosition);
                /* Update the header */
                maybeUpdateHeader(rowMutation);
                if (serializedRow instanceof DataOutputBuffer)
                {
                    DataOutputBuffer buffer = (DataOutputBuffer) serializedRow;
                    logWriter_.writeLong(buffer.getLength());
                    logWriter_.write(buffer.getData(), 0, buffer.getLength());
                }
                else
                {
                    assert serializedRow instanceof byte[];
                    byte[] bytes = (byte[]) serializedRow;
View Full Code Here

    throws IOException, ParseException
    {
        ColumnFamily cfamily = ColumnFamily.create(keyspace, cf);
        String cfType = cfamily.type();    // Super or Standard
        IPartitioner<?> partitioner = DatabaseDescriptor.getPartitioner();
        DataOutputBuffer dob = new DataOutputBuffer();
       
        try
        {
            JSONObject json = (JSONObject)JSONValue.parseWithException(new FileReader(jsonFile));
           
            SSTableWriter writer = new SSTableWriter(ssTablePath, json.size(), partitioner);
            List<DecoratedKey<?>> decoratedKeys = new ArrayList<DecoratedKey<?>>();
           
            for (String key : (Set<String>)json.keySet())
                decoratedKeys.add(partitioner.decorateKey(key));
            Collections.sort(decoratedKeys);

            for (DecoratedKey<?> rowKey : decoratedKeys)
            {
                if (cfType.equals("Super"))
                    addToSuperCF((JSONObject)json.get(rowKey.key), cfamily);
                else
                    addToStandardCF((JSONArray)json.get(rowKey.key), cfamily);
                          
                ColumnFamily.serializer().serializeWithIndexes(cfamily, dob);
                writer.append(rowKey, dob);
                dob.reset();
                cfamily.clear();
            }
           
            writer.closeAndOpenReader(0);
        }
View Full Code Here

    {
        logger_.info("Writing " + this);
        ColumnFamilyStore cfStore = Table.open(table_).getColumnFamilyStore(cfName_);
        SSTableWriter writer = new SSTableWriter(cfStore.getTempSSTablePath(), columnFamilies_.size(), StorageService.getPartitioner());

        DataOutputBuffer buffer = new DataOutputBuffer();
        for (DecoratedKey key : sortedKeys)
        {
            buffer.reset();
            ColumnFamily columnFamily = columnFamilies_.get(key);
            /* serialize the cf with column indexes */
            ColumnFamily.serializer().serializeWithIndexes(columnFamily, buffer);
            /* Now write the key and value to disk */
            writer.append(key, buffer);
View Full Code Here

TOP

Related Classes of org.apache.cassandra.io.DataOutputBuffer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.