Package org.apache.cassandra.io

Examples of org.apache.cassandra.io.DataOutputBuffer
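DataOutputBuffer is a growable, in-memory implementation of DataOutput: code serializes into it, then reads the backing byte array with getData() and the number of valid bytes with getLength(). A minimal sketch of the recurring pattern in the examples below (Row.serializer() is borrowed from the getReply() example; row stands in for any Row instance):

    DataOutputBuffer out = new DataOutputBuffer();
    Row.serializer().serialize(row, out);
    // getData() returns the whole backing array; only the first getLength() bytes are valid
    byte[] bytes = Arrays.copyOf(out.getData(), out.getLength());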


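          // Compaction: rows with equal keys are gathered from the priority queue,
          // merged, and re-serialized into a shared DataOutputBuffer.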
          {
              mergedFileName = getTempFileName();
              SSTable ssTableRange = null;
              String lastkey = null;
              List<FileStruct> lfs = new ArrayList<FileStruct>();
              DataOutputBuffer bufOut = new DataOutputBuffer();
              int expectedBloomFilterSize = SSTable.getApproximateKeyCount(files);
              expectedBloomFilterSize = (expectedBloomFilterSize > 0) ? expectedBloomFilterSize : SSTable.indexInterval();
              logger_.debug("Expected bloom filter size : " + expectedBloomFilterSize);
              /* Create the bloom filter for the compacted file. */
              BloomFilter compactedRangeBloomFilter = new BloomFilter(expectedBloomFilterSize, 15);
              List<ColumnFamily> columnFamilies = new ArrayList<ColumnFamily>();

              while (pq.size() > 0 || lfs.size() > 0)
              {
                  FileStruct fs = null;
                  if (pq.size() > 0)
                  {
                      fs = pq.poll();
                  }
                  if (fs != null
                          && (lastkey == null || lastkey.equals(fs.getKey())))
                  {
                      // The keys are the same, so add this file to the
                      // lfs list
                      lastkey = fs.getKey();
                      lfs.add(fs);
                  }
                  else
                  {
                      Collections.sort(lfs, new FileStructComparator());
                      ColumnFamily columnFamily;
                      bufOut.reset();
                      if(lfs.size() > 1)
                      {
                        for (FileStruct filestruct : lfs)
                        {
                            try
                            {
                                /* read the length although we don't need it */
                                filestruct.getBufIn().readInt();
                                // Skip the bloom filter and index
                                IndexHelper.skipBloomFilterAndIndex(filestruct.getBufIn());
                                // Keep at most two column families in memory and resolve
                                // them right away to limit the memory footprint
                                if (columnFamilies.size() > 1)
                                {
                                    // Now merge the two column families
                                    merge(columnFamilies);
                                }
                                // deserialize into column families
                                columnFamilies.add(ColumnFamily.serializer().deserialize(filestruct.getBufIn()));
                            }
                            catch ( Exception ex)
                            {
                                logger_.warn(LogUtil.throwableToString(ex));
                            }
                        }
                        // Append the merged, resolved row to the sstable
                        columnFamily = resolveAndRemoveDeleted(columnFamilies);
                        columnFamilies.clear();
                        if (columnFamily != null)
                        {
                            /* serialize the cf with column indexes */
                            ColumnFamily.serializerWithIndexes().serialize(columnFamily, bufOut);
                        }
                      }
                      else
                      {
                        FileStruct filestruct = lfs.get(0);
                        try
                        {
                            /* read the length although we don't need it */
                            int size = filestruct.getBufIn().readInt();
                            bufOut.write(filestruct.getBufIn(), size);
                        }
                        catch ( Exception ex)
                        {
                            logger_.warn(LogUtil.throwableToString(ex));
                            filestruct.close();
                            // … (excerpt truncated)


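        // Another revision of the same compaction loop; the DataOutputBuffer usage is identical.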
        {
            String mergedFileName = getTempFileName( files );
            SSTable ssTable = null;
            String lastkey = null;
            List<FileStruct> lfs = new ArrayList<FileStruct>();
            DataOutputBuffer bufOut = new DataOutputBuffer();
            int expectedBloomFilterSize = SSTable.getApproximateKeyCount(files);
            expectedBloomFilterSize = (expectedBloomFilterSize > 0) ? expectedBloomFilterSize : SSTable.indexInterval();
            logger_.debug("Expected bloom filter size : " + expectedBloomFilterSize);
            /* Create the bloom filter for the compacted file. */
            BloomFilter compactedBloomFilter = new BloomFilter(expectedBloomFilterSize, 15);
            List<ColumnFamily> columnFamilies = new ArrayList<ColumnFamily>();

            while (pq.size() > 0 || lfs.size() > 0)
            {
                FileStruct fs = null;
                if (pq.size() > 0)
                {
                    fs = pq.poll();
                }
                if (fs != null
                        && (lastkey == null || lastkey.equals(fs.getKey())))
                {
                    // The keys are the same, so add this file to the
                    // lfs list
                    lastkey = fs.getKey();
                    lfs.add(fs);
                }
                else
                {
                    Collections.sort(lfs, new FileStructComparator());
                    ColumnFamily columnFamily;
                    bufOut.reset();
                    if(lfs.size() > 1)
                    {
                        for (FileStruct filestruct : lfs)
                        {
                            try
                            {
                                /* read the length although we don't need it */
                                filestruct.getBufIn().readInt();
                                // Skip the bloom filter and index
                                IndexHelper.skipBloomFilterAndIndex(filestruct.getBufIn());
                                // Keep at most two column families in memory and resolve
                                // them right away to limit the memory footprint
                                if(columnFamilies.size() > 1)
                                {
                                    merge(columnFamilies);
                                }
                                // deserialize into column families
                                columnFamilies.add(ColumnFamily.serializer().deserialize(filestruct.getBufIn()));
                            }
                            catch ( Exception ex)
                            {
                                logger_.warn("error in filecompaction", ex);
                            }
                        }
                        // Append the merged, resolved row to the sstable
                        columnFamily = resolveAndRemoveDeleted(columnFamilies);
                        columnFamilies.clear();
                        if( columnFamily != null )
                        {
                            /* serialize the cf with column indexes */
                            ColumnFamily.serializerWithIndexes().serialize(columnFamily, bufOut);
                        }
                    }
                    else
                    {
                        FileStruct filestruct = lfs.get(0);
                        try
                        {
                            /* read the length although we don't need it */
                            int size = filestruct.getBufIn().readInt();
                            bufOut.write(filestruct.getBufIn(), size);
                        }
                        catch ( Exception ex)
                        {
                            ex.printStackTrace();
                            filestruct.close();
                            // … (excerpt truncated)

            public int compare(String o1, String o2)
            {
                return dc.compare(partitioner.decorateKey(o1), partitioner.decorateKey(o2));
            }
        });
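        // Memtable flush: serialize each ColumnFamily into a reusable buffer,
        // append key + buffer to the SSTable, and record the key in the bloom filter.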
        DataOutputBuffer buffer = new DataOutputBuffer();
        /* Use this BloomFilter to decide if a key exists in a SSTable */
        BloomFilter bf = new BloomFilter(columnFamilies_.size(), 15);
        for (String key : orderedKeys)
        {
            buffer.reset();
            ColumnFamily columnFamily = columnFamilies_.get(key);
            if ( columnFamily != null )
            {
                /* serialize the cf with column indexes */
                ColumnFamily.serializerWithIndexes().serialize( columnFamily, buffer );
                /* Now write the key and value to disk */
                ssTable.append(partitioner.decorateKey(key), buffer);
                bf.add(key);
                columnFamily.clear();
            }
        }
        ssTable.close(bf);
        cfStore.onMemtableFlush(cLogCtx);
        cfStore.storeLocation( ssTable.getDataFileLocation(), bf );
        buffer.close();

        columnFamilies_.clear();
    }

    public FileStruct(IFileReader reader, IPartitioner partitioner)
    {
        this.reader = reader;
        this.partitioner = partitioner;
        bufIn = new DataInputBuffer();
        bufOut = new DataOutputBuffer();
    }


    public static Filter testSerialize(Filter f) throws IOException
    {
        f.add("a");
        DataOutputBuffer out = new DataOutputBuffer();
        f.getSerializer().serialize(f, out);

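        // Round-trip: point a DataInputBuffer at the first getLength() bytes of out's backing array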
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        Filter f2 = f.getSerializer().deserialize(in);

        assert f2.isPresent("a");
        assert !f2.isPresent("b");
        return f2;
    }

    public static void serialize(ColumnFamily columnFamily, DataOutput dos)
    {
        Collection<IColumn> columns = columnFamily.getSortedColumns();
        BloomFilter bf = createColumnBloomFilter(columns);
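        /* Stage the filter in a DataOutputBuffer so its serialized length can be
           written to dos ahead of the filter bytes themselves. */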
        /* Write out the bloom filter. */
        DataOutputBuffer bufOut = new DataOutputBuffer();
        try
        {
            BloomFilter.serializer().serialize(bf, bufOut);
            /* write the length of the serialized bloom filter. */
            dos.writeInt(bufOut.getLength());
            /* write out the serialized bytes. */
            dos.write(bufOut.getData(), 0, bufOut.getLength());

            /* Do the indexing */
            doIndexing(columnFamily.getComparator(), columns, dos);
        }
        catch (IOException e)
            // … (excerpt truncated)

        this.rows = rows;
    }

    public Message getReply(Message originalMessage) throws IOException
    {
        DataOutputBuffer dob = new DataOutputBuffer();
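        // Frame: row count first, then each serialized row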
        dob.writeInt(rows.size());
        for (Row row : rows)
        {
            Row.serializer().serialize(row, dob);
        }
        byte[] data = Arrays.copyOf(dob.getData(), dob.getLength());
        return originalMessage.getReply(FBUtilities.getLocalAddress(), data);
    }

        this.max_keys = max_keys;
    }

    public Message getMessage() throws IOException
    {
        DataOutputBuffer dob = new DataOutputBuffer();
        serializer.serialize(this, dob);
        return new Message(FBUtilities.getLocalAddress(),
                           StageManager.readStage_,
                           StorageService.rangeSliceVerbHandler_,
                           Arrays.copyOf(dob.getData(), dob.getLength()));
    }

        Token mid = part.midpoint(min, min);
        validator.prepare();

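        // Empty DataOutputBuffers serve as placeholder row contents in this test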
        // add a row with the minimum token
        validator.add(new CompactedRow(new DecoratedKey(min, "nonsense!"),
                                       new DataOutputBuffer()));

        // and a row after it
        validator.add(new CompactedRow(new DecoratedKey(mid, "inconceivable!"),
                                       new DataOutputBuffer()));
        validator.complete();

        // confirm that the tree was validated
        assert null != validator.tree.hash(new Range(min, min));
    }

        assertEquals(endPoint, expected.getEndPoint());
        assertEquals(generation, expected.getGeneration());
        assertEquals(maxVersion, expected.getMaxVersion());
       
        // round-trip through serialization and check the digests compare equal
        DataOutputBuffer output = new DataOutputBuffer();
        GossipDigest.serializer().serialize(expected, output);
       
        DataInputBuffer input = new DataInputBuffer();
        input.reset(output.getData(), output.getLength());
        GossipDigest actual = GossipDigest.serializer().deserialize(input);
        assertEquals(0, expected.compareTo(actual));
    }
