Package org.apache.cassandra.utils

Examples of org.apache.cassandra.utils.BloomFilter
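The excerpts below all follow the same pattern: build a BloomFilter while writing data (an SSTable or a column index), then consult isPresent() on the read path to skip work when a key or column cannot be there. As a quick orientation, here is a minimal sketch of that API; the constructor arguments and the add/isPresent calls are taken from the excerpts, but the wrapping BloomFilterSketch class is hypothetical and exact signatures differ between Cassandra versions.

    import org.apache.cassandra.utils.BloomFilter;

    public class BloomFilterSketch
    {
        public static void main(String[] args)
        {
            // Size the filter for the expected number of keys; more buckets per
            // element lowers the false-positive rate at the cost of memory.
            BloomFilter bf = new BloomFilter(1000, 8);
            for (int i = 0; i < 1000; ++i)
                bf.add(Integer.toString(i));

            // A bloom filter has no false negatives: every key that was added reports present.
            assert bf.isPresent("42");

            // A key that was never added may still report present; that is the bounded
            // false-positive case that callers must tolerate.
            System.out.println("missing key reported present: " + bf.isPresent("no-such-key"));
        }
    }

The first excerpt, apparently from the column-index serializer, builds such a filter over column names while writing a row.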


    public static void serializeInternal(IIterableColumns columns, DataOutput dos) throws IOException
    {
        int columnCount = columns.getEstimatedColumnCount();

        BloomFilter bf = BloomFilter.getFilter(columnCount, 4);

        if (columnCount == 0)
        {
            writeEmptyHeader(dos, bf);
            return;
        }

        // update bloom filter and create a list of IndexInfo objects marking the first and last column
        // in each block of ColumnIndexSize
        List<IndexHelper.IndexInfo> indexList = new ArrayList<IndexHelper.IndexInfo>();
        int endPosition = 0, startPosition = -1;
        int indexSizeInBytes = 0;
        IColumn lastColumn = null, firstColumn = null;
        for (IColumn column : columns)
        {
            bf.add(column.name());

            if (firstColumn == null)
            {
                firstColumn = column;
                startPosition = endPosition;
                // ... (excerpt truncated)


        // Excerpt begins mid-method: the sorted keys of an in-memory column-family map are
        // written to a new SSTable, and a BloomFilter over those keys is built alongside it.
        SSTable ssTable = new SSTable(directory, filename);
        List<String> keys = new ArrayList<String>( columnFamilies_.keySet() );
        Collections.sort(keys);       
        /* Use this BloomFilter to decide if a key exists in a SSTable */
        BloomFilter bf = new BloomFilter(keys.size(), 8);
        for ( String key : keys )
        {          
            byte[] bytes = columnFamilies_.get(key);
            if ( bytes.length > 0 )
            {             
                /* Now write the key and value to disk */
                ssTable.append(key, bytes);
                bf.add(key);
            }
        }
        ssTable.close(bf);
        cfStore.storeLocation( ssTable.getDataFileLocation(), bf );
        columnFamilies_.clear();      
        // ... (excerpt truncated)

            // Excerpt from a helper that reads a serialized BloomFilter back from a data file:
            // a length-prefixed byte block is read and passed to the BloomFilter deserializer.
            int size = file_.readInt();
            byte[] bytes = new byte[size];
            file_.readFully(bytes);
            DataInputBuffer bufIn = new DataInputBuffer();
            bufIn.reset(bytes, bytes.length);
            BloomFilter bf = BloomFilter.serializer().deserialize(bufIn);
            return bf;
        }

            // Excerpt from a column read path (the enclosing if/else is not shown):
                /* Read the bloom filter summarizing the columns */
                long preBfPos = file_.getFilePointer();
                BloomFilter bf = defreezeBloomFilter();
                long postBfPos = file_.getFilePointer();
                dataSize -= (postBfPos - preBfPos);

                List<IndexHelper.ColumnIndexInfo> columnIndexList = new ArrayList<IndexHelper.ColumnIndexInfo>();
                /* read the column name indexes if present */
 
                // ... (excerpt truncated)

    // Test-driver excerpt: write 900 rows to an SSTable while populating a row-key BloomFilter.
    private static void rawSSTableWrite() throws Throwable
    {
        SSTable ssTable = new SSTable("C:\\Engagements\\Cassandra", "Table-Test-1");
        DataOutputBuffer bufOut = new DataOutputBuffer();
        BloomFilter bf = new BloomFilter(1000, 8);
        byte[] bytes = new byte[64*1024];
        Random random = new Random();
        for ( int i = 100; i < 1000; ++i )
        {
            String key = Integer.toString(i);
            ColumnFamily cf = new ColumnFamily("Test", "Standard");
            bufOut.reset();          
            // random.nextBytes(bytes);
            cf.addColumn("C", "Avinash Lakshman is a good man".getBytes(), i);
            ColumnFamily.serializerWithIndexes().serialize(cf, bufOut);
            ssTable.append(key, bufOut);           
            bf.add(key);
        }
        ssTable.close(bf);
    }

    // Test-driver excerpt: serialize a BloomFilter to disk and read it back through the serializer.
    public static void main(String[] args) throws Throwable
    {
        BloomFilter bf = new BloomFilter(1024*1024, 15);
        for ( int i = 0; i < 1024*1024; ++i )
        {
            bf.add(Integer.toString(i));
        }
       
        DataOutputBuffer bufOut = new DataOutputBuffer();
        BloomFilter.serializer().serialize(bf, bufOut);
        FileOutputStream fos = new FileOutputStream("C:\\Engagements\\bf.dat", true);
        fos.write(bufOut.getData(), 0, bufOut.getLength());
        fos.close();
       
        FileInputStream fis = new FileInputStream("C:\\Engagements\\bf.dat");
        byte[] bytes = new byte[fis.available()];
        fis.read(bytes);
        DataInputBuffer bufIn = new DataInputBuffer();
        bufIn.reset(bytes, bytes.length );
        BloomFilter bf2 = BloomFilter.serializer().deserialize(bufIn);
       
        int count = 0;
        for ( int i = 0; i < 1024*1024; ++i )
        {
            if ( bf.isPresent(Integer.toString(i)) )
                // ... (excerpt truncated)

    /*
     * ... key is not present then we skip processing this file.
     */
    public static boolean isKeyInFile(String clientKey, String filename)
    {
        boolean bVal = false;
        BloomFilter bf = bfs_.get(filename);
        if ( bf != null )
        {
            bVal = bf.isPresent(clientKey);
        }
        return bVal;
    }

    // Excerpts (two constructor variants): SSTableWriter sizes its row-key BloomFilter from the expected key count.
    public SSTableWriter(String filename, long keyCount, IPartitioner partitioner) throws IOException
    {
        super(filename, partitioner);
        dataFile = new BufferedRandomAccessFile(path, "rw", (int)(DatabaseDescriptor.getFlushDataBufferSizeInMB() * 1024 * 1024));
        indexFile = new BufferedRandomAccessFile(indexFilename(), "rw", (int)(DatabaseDescriptor.getFlushIndexBufferSizeInMB() * 1024 * 1024));
        bf = new BloomFilter((int)keyCount, 15); // TODO fix long -> int cast
    }

    public SSTableWriter(String filename, int keyCount, IPartitioner partitioner) throws IOException
    {
        super(filename, partitioner);
        dataFile = new BufferedRandomAccessFile(path, "rw", (int)(DatabaseDescriptor.getFlushDataBufferSizeInMB() * 1024 * 1024));
        indexFile = new BufferedRandomAccessFile(indexFilename(), "rw", (int)(DatabaseDescriptor.getFlushIndexBufferSizeInMB() * 1024 * 1024));
        bf = new BloomFilter(keyCount, 15);
    }

            // Excerpt from a named-column read path: after validating the row key, the per-row
            // column BloomFilter is used to drop requested column names that cannot be present.
            assert keyInDisk.equals(decoratedKey)
                   : String.format("%s != %s in %s", keyInDisk, decoratedKey, file.getPath());
            file.readInt(); // data size

            /* Read the bloom filter summarizing the columns */
            BloomFilter bf = IndexHelper.defreezeBloomFilter(file);
            List<byte[]> filteredColumnNames = new ArrayList<byte[]>(columnNames.size());
            for (byte[] name : columnNames)
            {
                if (bf.isPresent(name))
                {
                    filteredColumnNames.add(name);
                }
            }
            if (filteredColumnNames.isEmpty())
            // ... (excerpt truncated)
