Package org.apache.cassandra.io

Examples of org.apache.cassandra.io.DataOutputBuffer
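DataOutputBuffer is an in-memory DataOutput backed by a growable byte array. The recurring idiom in the excerpts below is to serialize into the buffer and then hand the raw bytes to a file, digest, or stream; since getData() returns the backing array itself, which can be longer than the data actually written, it is always paired with getLength(). A minimal round-trip sketch, assuming the pre-0.7 org.apache.cassandra.io classes these excerpts use:

import java.io.IOException;

import org.apache.cassandra.io.DataInputBuffer;
import org.apache.cassandra.io.DataOutputBuffer;

public class DataOutputBufferRoundTrip
{
    public static void main(String[] args) throws IOException
    {
        /* Write primitives into the growable in-memory buffer. */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        bufOut.writeUTF("row-key");
        bufOut.writeLong(42L);

        /* Only the first getLength() bytes of getData() are valid. */
        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bufOut.getData(), bufOut.getLength());

        assert bufIn.readUTF().equals("row-key");
        assert bufIn.readLong() == 42L;
    }
}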


            while ( (line = bufReader.readLine()) != null )
            {
                /* After accumulating count_ keys reset the bloom filter. */
                if ( keyCount > 0 && keyCount % count_ == 0 )
                {                      
                    DataOutputBuffer bufOut = new DataOutputBuffer();
                    BloomFilter.serializer().serialize(bf, bufOut);
                    System.out.println("Finished serializing the bloom filter");
                    buffers.add(bufOut);
                    bf = new BloomFilter(count_, 8);
                }
                line = line.trim();               
                bf.add(line);
                ++keyCount;
            }
        }
       
        /* Serialize the last bloom filter, which the loop above leaves unflushed. */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        BloomFilter.serializer().serialize(bf, bufOut);
        buffers.add(bufOut);
       
       
        int size = buffers.size();
        for ( int i = 0; i < size; ++i )
        {
            DataOutputBuffer buffer = buffers.get(i);
            String file = args[1] + System.getProperty("file.separator") + "Bloom-Filter-" + i + ".dat";
            RandomAccessFile raf = new RandomAccessFile(file, "rw");
            raf.write(buffer.getData(), 0, buffer.getLength());
            raf.close();
            buffer.close();
        }
        System.out.println("Done writing the bloom filter to disk");
    }


    }

    public static boolean extractKeyIntoFile(String keyToExtract, String dataFile, String outputFile) throws IOException
    {
        IFileReader dataReader = SequenceFile.bufferedReader(dataFile, bufferSize_);
        DataOutputBuffer bufOut = new DataOutputBuffer();
        DataInputBuffer bufIn = new DataInputBuffer();

        try
        {
            while ( !dataReader.isEOF() )
            {
                bufOut.reset();
                dataReader.next(bufOut);
                bufIn.reset(bufOut.getData(), bufOut.getLength());
                /* Key just read */
                String key = bufIn.readUTF();
                /* check if we want this key */
                if ( key.equals(keyToExtract) )
                {
                    /* ... remainder of the matching-key branch elided in this excerpt ... */

        return false;
    }
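Note how DataOutputBuffer doubles as a reusable receive buffer here: each iteration reset()s it, lets the SequenceFile reader fill it via next(), and then points a DataInputBuffer at the same backing array to parse the key without copying.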
   
    private static void dumpBlockIndex(String key, long position, long size, RandomAccessFile raf) throws IOException
    {
        DataOutputBuffer bufOut = new DataOutputBuffer();                      
        /* Number of keys in this block */
        bufOut.writeInt(1);
        bufOut.writeUTF(key);
        bufOut.writeLong(position);
        bufOut.writeLong(size);
       
        /* Write out the block index. */
        raf.writeUTF(SSTable.blockIndexKey_);
        raf.writeInt(bufOut.getLength());
        raf.write(bufOut.getData(), 0, bufOut.getLength());
    }
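The record above is length-prefixed: a marker key, the payload length, then the payload bytes. A sketch of the matching read side follows; readBlockIndex is hypothetical, not part of the original class, and relies only on RandomAccessFile implementing DataInput (readUTF/readInt/readFully).

    private static void readBlockIndex(RandomAccessFile raf) throws IOException
    {
        String marker = raf.readUTF();   /* should equal SSTable.blockIndexKey_ */
        int length = raf.readInt();      /* payload size written by dumpBlockIndex */
        byte[] bytes = new byte[length];
        raf.readFully(bytes);

        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bytes, length);
        int keysInBlock = bufIn.readInt();
        String key = bufIn.readUTF();
        long position = bufIn.readLong();
        long size = bufIn.readLong();
    }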

    public void updateDigest(MessageDigest digest)
    {
        assert name_ != null;
        digest.update(name_);
        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(markedForDeleteAt);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
        for (IColumn column : columns_.values())
        {
            column.updateDigest(digest);
        }
    }
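markedForDeleteAt is routed through a DataOutputBuffer because MessageDigest.update() only accepts bytes; writeLong() supplies the big-endian encoding guaranteed by the DataOutput contract, so every replica hashing the same column family arrives at the same digest. The idiom generalizes to a small helper (hypothetical, not in the original class):

    private static void updateDigest(MessageDigest digest, long value)
    {
        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(value);   /* big-endian, per DataOutput */
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
    }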

    public static void serialize(ColumnFamily columnFamily, DataOutput dos) throws IOException
    {
        Collection<IColumn> columns = columnFamily.getSortedColumns();
        BloomFilter bf = createColumnBloomFilter(columns);                   
        /* Write out the bloom filter. */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        BloomFilter.serializer().serialize(bf, bufOut);
        /* write the length of the serialized bloom filter. */
        dos.writeInt(bufOut.getLength());
        /* write out the serialized bytes. */
        dos.write(bufOut.getData(), 0, bufOut.getLength());

        /* Do the indexing */
        doIndexing(columnFamily.getComparator(), columns, dos);
    }
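Writing the serialized filter's length first lets a reader size its buffer exactly, or skip over the filter entirely and seek straight to the column index data that doIndexing() appends.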

            public int compare(String o1, String o2)
            {
                return dc.compare(partitioner.decorateKey(o1), partitioner.decorateKey(o2));
            }
        });
        DataOutputBuffer buffer = new DataOutputBuffer();
        for (String key : orderedKeys)
        {
            buffer.reset();
            ColumnFamily columnFamily = columnFamilies_.get(key);
            if (columnFamily != null)
            {
                /* serialize the cf with column indexes */
                ColumnFamily.serializer().serializeWithIndexes(columnFamily, buffer);
                /* Now write the key and value to disk */
                writer.append(partitioner.decorateKey(key), buffer);
            }
        }
        SSTableReader ssTable = writer.closeAndOpenReader();
        cfStore.onMemtableFlush(cLogCtx);
        cfStore.storeLocation(ssTable);
        buffer.close();
        isFlushed_ = true;
        logger_.info("Completed flushing " + ssTable.getFilename());
    }
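Note the reuse pattern: one DataOutputBuffer is reset() before each key instead of being reallocated, so a single flush pass amortizes the buffer's growth across every row in the memtable.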

        }

        public CommitLog.CommitLogContext call() throws Exception
        {
            long currentPosition = -1L;
            DataOutputBuffer cfBuffer = new DataOutputBuffer();
            try
            {
                /* serialize the row */
                Row.serializer().serialize(row, cfBuffer);
                currentPosition = logWriter_.getFilePointer();
                CommitLogContext cLogCtx = new CommitLogContext(logFile_, currentPosition);
                /* Update the header */
                maybeUpdateHeader(row);
                logWriter_.writeLong(cfBuffer.getLength());
                logWriter_.write(cfBuffer.getData(), 0, cfBuffer.getLength());
                maybeRollLog();
                return cLogCtx;
            }
            catch (IOException e)
            {
                /* ... exception handling elided in this excerpt ... */

    public void updateDigest(MessageDigest digest)
    {
        digest.update(name);
        digest.update(value);
        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(timestamp);
            buffer.writeBoolean(isMarkedForDelete);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
    }

    {
        ColumnFamily cf;

        cf = ColumnFamily.create("Keyspace1", "Standard1");
        cf.addColumn(column("C", "v", 1));
        DataOutputBuffer bufOut = new DataOutputBuffer();
        ColumnFamily.serializer().serialize(cf, bufOut);

        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bufOut.getData(), bufOut.getLength());
        cf = ColumnFamily.serializer().deserialize(bufIn);
        assert cf != null;
        assert cf.name().equals("Standard1");
        assert cf.getSortedColumns().size() == 1;
    }
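This serialize-into-buffer, reset-input-buffer, deserialize round trip exercises the serializer entirely in memory, with no disk I/O.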
