Package org.apache.cassandra.db

Examples of org.apache.cassandra.db.ColumnFamily$ColumnFamilySerializer
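
The fragments on this page are drawn from pre-1.0 Cassandra internals and from client code written against them. Two ColumnFamilySerializer entry points recur throughout: serializeWithIndexes(cf, out), which writes a column family together with its column index, and deserializeColumns(in, cf), which reads columns back into an existing ColumnFamily shell. As a point of reference, here is a minimal sketch of the write side, assuming the same internal API the examples below use (the wrapper class is a hypothetical container, and the import paths match the Cassandra era these snippets come from; they may differ in other versions):

    import org.apache.cassandra.db.ColumnFamily;
    import org.apache.cassandra.io.util.DataOutputBuffer;

    public final class CfSerializationSketch
    {
        /* Serialize a ColumnFamily, column index included, into a standalone byte[].
           This mirrors the createMessage example further down the page. */
        public static byte[] toIndexedBytes(ColumnFamily cf)
        {
            DataOutputBuffer bufOut = new DataOutputBuffer();
            ColumnFamily.serializer().serializeWithIndexes(cf, bufOut);
            byte[] data = new byte[bufOut.getLength()];
            System.arraycopy(bufOut.getData(), 0, data, 0, bufOut.getLength());
            return data;
        }
    }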


        String queryFor = sColumnFamily;
        if (sColumn != null && !"*".equals(sColumn))
        {
            queryFor += ":" + sColumn;
        }
        ColumnFamily cf = table.get(sKey, queryFor);

        if (cf == null)
        {
            sRetVal = "Key [" + sKey + "], column family [" + sColumnFamily + "] not found.";
        }
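
The fragment above is from the old command-line client: the query path passed to table.get is the column family name, optionally suffixed with ":column". A compact sketch of the same lookup, assuming the Table handle and the get(key, "ColumnFamily[:column]") signature used above (names are placeholders):

    /* Hypothetical helper around the same table.get(...) call as in the fragment above. */
    static ColumnFamily fetch(Table table, String key, String cfName, String column) throws Exception
    {
        String queryFor = cfName;
        if (column != null && !"*".equals(column))
            queryFor += ":" + column;
        return table.get(key, queryFor);   // null when the key or column family is not found
    }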


        Map<String, ColumnFamily> columnFamilies = diffRow.getColumnFamilyMap();
        Set<String> cfNames = columnFamilies.keySet();

        for (String cfName : cfNames)
        {
            ColumnFamily cf = columnFamilies.get(cfName);
            rowMutation.add(cf);
        }
        RowMutationMessage rowMutationMessage = new RowMutationMessage(rowMutation);
        // schedule the read repair
        ReadRepairManager.instance().schedule(endPoints.get(i), rowMutationMessage);
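
The read-repair fragment above copies every ColumnFamily of the diff row into a RowMutation and schedules it for the out-of-date endpoint. The loop can also be written directly over the map's values rather than keySet() plus get(); a minimal sketch, assuming the same rowMutation.add(cf) API as above:

    /* Copy all column families of a row diff into an existing RowMutation. */
    static void addAll(RowMutation rowMutation, Map<String, ColumnFamily> columnFamilies)
    {
        for (ColumnFamily cf : columnFamilies.values())
            rowMutation.add(cf);
    }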

                versionSources.add(current.right);
            }

            protected Row getReduced()
            {
                ColumnFamily resolved = versions.size() > 1
                                      ? RowRepairResolver.resolveSuperset(versions)
                                      : versions.get(0);
                if (versions.size() < sources.size())
                {
                    // add placeholder rows for sources that didn't have any data, so maybeScheduleRepairs sees them

        }
    }

    public static Message createMessage(String keyspace, byte[] key, String columnFamily, List<ColumnFamily> columnFamilies)
    {
        ColumnFamily baseColumnFamily;
        DataOutputBuffer bufOut = new DataOutputBuffer();
        RowMutation rm;
        Message message;
        Column column;

        /* Create a base column family from the keyspace/column family metadata; it exists just to get past validation */
        baseColumnFamily = new ColumnFamily(ColumnFamilyType.Standard,
                                            DatabaseDescriptor.getComparator(keyspace, columnFamily),
                                            DatabaseDescriptor.getSubComparator(keyspace, columnFamily),
                                            CFMetaData.getId(keyspace, columnFamily));
       
        for (ColumnFamily cf : columnFamilies)
        {
            bufOut.reset();
            ColumnFamily.serializer().serializeWithIndexes(cf, bufOut);
            byte[] data = new byte[bufOut.getLength()];
            System.arraycopy(bufOut.getData(), 0, data, 0, bufOut.getLength());

            column = new Column(FBUtilities.toByteBuffer(cf.id()), ByteBuffer.wrap(data), 0);
            baseColumnFamily.addColumn(column);
        }
        rm = new RowMutation(keyspace, ByteBuffer.wrap(key));
        rm.add(baseColumnFamily);

        try
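
createMessage above packs one serialized ColumnFamily per Column under a single base column family, so a whole batch of column families for one key travels in a single RowMutation-backed Message. A hedged usage sketch, assuming a call from the class that declares createMessage; the keyspace, key, and column family names are placeholders and must already exist in the loaded schema:

    List<ColumnFamily> columnFamilies = new LinkedList<ColumnFamily>();
    columnFamilies.add(ColumnFamily.create("Keyspace1", "Standard1"));   // populated elsewhere
    Message message = createMessage("Keyspace1",
                                    "row-key-1".getBytes(),
                                    "Standard1",
                                    columnFamilies);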

            StorageService.instance.stopClient();
        }

        public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter) throws IOException
        {
            ColumnFamily columnFamily;
            String keyspace = "Keyspace1";
            String cfName = "Super1";
            Message message;
            List<ColumnFamily> columnFamilies;
            columnFamilies = new LinkedList<ColumnFamily>();
            String line;

            /* Create a column family */
            columnFamily = ColumnFamily.create(keyspace, cfName);
            while (values.hasNext())
            {
                // Split the value (one line of reducer input) on your own delimiter
                line = values.next().toString();
                String[] fields = line.split("\1");
                String SuperColumnName = fields[1];
                String ColumnName = fields[2];
                String ColumnValue = fields[3];
                int timestamp = 0;
                columnFamily.addColumn(new QueryPath(cfName,
                                                     ByteBufferUtil.bytes(SuperColumnName),
                                                     ByteBufferUtil.bytes(ColumnName)),
                                       ByteBufferUtil.bytes(ColumnValue),
                                       timestamp);
            }
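
This reducer builds one super-column insert per input line via addColumn(QueryPath, value, timestamp). Note that the example hard-codes timestamp 0, so any later write carrying a real timestamp will always win over these columns. A small variation, assuming the same addColumn signature, using the conventional microsecond timestamp instead:

    long timestamp = System.currentTimeMillis() * 1000;   // microseconds, the usual Cassandra convention
    columnFamily.addColumn(new QueryPath(cfName,
                                         ByteBufferUtil.bytes(SuperColumnName),
                                         ByteBufferUtil.bytes(ColumnName)),
                           ByteBufferUtil.bytes(ColumnValue),
                           timestamp);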

        Set<SSTable> sstables = new HashSet<SSTable>();
        for (SSTableIdentityIterator row : rows)
        {
            sstables.add(row.sstable);
            ColumnFamily cf = row.getColumnFamily();

            if (emptyColumnFamily == null)
                emptyColumnFamily = cf;
            else
                emptyColumnFamily.delete(cf);
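
During compaction the fragment above keeps the first row's ColumnFamily and folds each later version into it with delete(cf), so the merged row carries the most aggressive row-level tombstone seen across the SSTables. The same merge written as a standalone sketch, under the assumption that delete(cf) behaves as it does above:

    /* Fold row-level deletion info from several versions of one row into a single ColumnFamily. */
    static ColumnFamily mergeDeletionInfo(Iterable<ColumnFamily> versions)
    {
        ColumnFamily merged = null;
        for (ColumnFamily cf : versions)
        {
            if (merged == null)
                merged = cf;
            else
                merged.delete(cf);    // same ColumnFamily.delete(cf) call as above
        }
        return merged;
    }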

        protected IColumn getReduced()
        {
            assert container != null;
            IColumn reduced = container.iterator().next();
            ColumnFamily purged = shouldPurge ? ColumnFamilyStore.removeDeleted(container, gcBefore) : container;
            if (purged == null || !purged.iterator().hasNext())
            {
                container.clear();
                return null;
            }
            container.clear();
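
Both this purge step and the row-level one further down rely on ColumnFamilyStore.removeDeleted(container, gcBefore): data shadowed by tombstones is purged, and the tombstones themselves are discarded once they are older than the gcBefore cutoff (seconds since the epoch). A sketch of how such a cutoff is typically derived, assuming the traditional gc_grace default; the values are illustrative:

    // Collect tombstones that have been expired for longer than gc_grace seconds.
    int gcGraceSeconds = 864000;   // 10 days, the long-standing default
    int gcBefore = (int) (System.currentTimeMillis() / 1000) - gcGraceSeconds;
    ColumnFamily purged = ColumnFamilyStore.removeDeleted(container, gcBefore);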

    }

    public ColumnFamily getColumnFamilyWithColumns() throws IOException
    {
        file.seek(columnPosition - 4); // seek to before column count int
        ColumnFamily cf = columnFamily.cloneMeShallow();
        ColumnFamily.serializer().deserializeColumns(file, cf);
        if (validateColumns)
        {
            try
            {
                cf.validateColumnFields();
            }
            catch (MarshalException e)
            {
                throw new IOException("Error validating row " + key, e);
            }
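
getColumnFamilyWithColumns seeks four bytes back from columnPosition because deserializeColumns expects to start at the serialized column-count int that precedes the columns. A minimal sketch of the read side against an arbitrary DataInput, assuming the file reader above implements DataInput and that the shell from cloneMeShallow carries the right comparator metadata:

    /* Read serialized columns back into an empty copy of a template ColumnFamily. */
    static ColumnFamily readColumns(ColumnFamily template, DataInput in) throws IOException
    {
        ColumnFamily cf = template.cloneMeShallow();           // empty shell, same metadata
        ColumnFamily.serializer().deserializeColumns(in, cf);  // column count, then each column
        return cf;
    }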

        }
        boolean shouldPurge = major || !cfStore.isKeyInRemainingSSTables(key, sstables);

        if (rows.size() > 1 || shouldPurge || !rows.get(0).sstable.descriptor.isLatestVersion || forceDeserialize)
        {
            ColumnFamily cf = null;
            for (SSTableIdentityIterator row : rows)
            {
                ColumnFamily thisCF;
                try
                {
                    thisCF = row.getColumnFamilyWithColumns();
                }
                catch (IOException e)
                {
                    logger.error("Skipping row " + key + " in " + row.getPath(), e);
                    continue;
                }
                if (cf == null)
                {
                    cf = thisCF;
                }
                else
                {
                    cf.addAll(thisCF);
                }
            }
            ColumnFamily cfPurged = shouldPurge ? ColumnFamilyStore.removeDeleted(cf, gcBefore) : cf;
            if (cfPurged == null)
                return;
            columnCount = ColumnFamily.serializer().serializeWithIndexes(cfPurged, buffer);
        }
        else

            }

            // If the row is cached, we call removeDeleted on it so that query results stay coherent; otherwise it would
            // look as if some deleted columns lived longer than gc_grace + compaction. This can also free up a large
            // amount of memory on long-running instances.
            ColumnFamily cachedRow = cfs.getRawCachedRow(compactedRow.key);
            if (cachedRow != null)
                ColumnFamilyStore.removeDeleted(cachedRow, gcBefore);

            return compactedRow;
        }
