Package org.apache.cassandra.thrift

Examples of org.apache.cassandra.thrift.KeySlice$KeySliceTupleScheme
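Every snippet on this page consumes a List<KeySlice> produced by the Thrift client. As a point of reference, here is a minimal, self-contained sketch of how such a list is usually obtained via Cassandra.Client.get_range_slices; the host, port, keyspace ("Keyspace1") and column family ("Standard1") are placeholder assumptions, not values taken from any of the projects quoted below.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.KeyRange;
import org.apache.cassandra.thrift.KeySlice;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;

public class KeySliceExample {
  public static void main(String[] args) throws Exception {
    // Placeholder connection settings -- adjust for your cluster.
    TFramedTransport transport = new TFramedTransport(new TSocket("localhost", 9160));
    transport.open();
    Cassandra.Client client = new Cassandra.Client(new TBinaryProtocol(transport));
    client.set_keyspace("Keyspace1");

    // Ask for up to 100 columns per row, with no restriction on column names.
    SlicePredicate predicate = new SlicePredicate();
    predicate.setSlice_range(new SliceRange(ByteBuffer.allocate(0), ByteBuffer.allocate(0), false, 100));

    // Ask for up to 50 rows, starting from the beginning of the ring.
    KeyRange range = new KeyRange(50);
    range.setStart_key(new byte[0]);
    range.setEnd_key(new byte[0]);

    List<KeySlice> slices = client.get_range_slices(
        new ColumnParent("Standard1"), predicate, range, ConsistencyLevel.ONE);

    for (KeySlice slice : slices)
      System.out.printf("key=%s columns=%d%n",
          new String(slice.getKey(), StandardCharsets.UTF_8), slice.getColumnsSize());

    transport.close();
  }
}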


          range,
          ConsistencyLevel.ONE);
     
      // Convert each KeySlice returned by the range query into a display-friendly KeySliceType
      KeySliceType[] types = new KeySliceType[slices.size()];
      for (int i = 0; i < types.length; i++) {
        KeySlice keySlice = slices.get(i);
        KeySliceType type = new KeySliceType();
        type.key = ByteArray.toUTF(keySlice.getKey());
        type.keyHex = new String(Hex.encodeHex(keySlice.getKey()));
        int clen = Math.min(columnCount, keySlice.getColumnsSize());
        type.columns = new String[clen];
        for (int j = 0; j < clen; j++) {
          ColumnOrSuperColumn cos = keySlice.columns.get(j);
          if (cos.isSetColumn()) {
            type.columns[j] = new String(Hex.encodeHex(cos.column.getName()));
          } else if (cos.isSetSuper_column()) {
            type.columns[j] = new String(Hex.encodeHex(cos.super_column.getName()));
          } else {
            type.columns[j] = "Unknown";
          }
        }
        if (keySlice.getColumnsSize() > columnCount) {
          type.hasMoreColumn = true;
        }
        types[i] = type;
      }
     
View Full Code Here


                              
                // reset to iterate through this new batch
                i = 0;
               
                // prepare for the next slice to be read
                KeySlice lastRow = rows.get(rows.size() - 1);
                ByteBuffer rowkey = lastRow.key;
                startToken = partitioner.getTokenFactory().toString(partitioner.getToken(rowkey));
            }
            catch (Exception e)
            {
View Full Code Here
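The snippet above prepares the next page by converting the last row's key into a token and using it as the next start token. Below is a hedged sketch of the same paging idea in a simplified form: it pages by key instead of token and skips the repeated first row of each batch. The client, predicate, page size and column family name ("Standard1") are assumptions for illustration, not code from the project above.

import java.util.Arrays;
import java.util.List;

import org.apache.cassandra.thrift.Cassandra;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.KeyRange;
import org.apache.cassandra.thrift.KeySlice;
import org.apache.cassandra.thrift.SlicePredicate;

public class KeySlicePager {
  private static final int PAGE_SIZE = 100;

  /** Walks every row of a column family, one page of KeySlices at a time. */
  static void scanAllRows(Cassandra.Client client, SlicePredicate predicate) throws Exception {
    byte[] startKey = new byte[0];
    boolean firstPage = true;

    while (true) {
      KeyRange range = new KeyRange(PAGE_SIZE);
      range.setStart_key(startKey);
      range.setEnd_key(new byte[0]);

      List<KeySlice> rows = client.get_range_slices(
          new ColumnParent("Standard1"), predicate, range, ConsistencyLevel.ONE);
      if (rows.isEmpty())
        return;

      for (KeySlice row : rows) {
        // Every page after the first begins with the previous page's last row; skip it.
        if (!firstPage && Arrays.equals(row.getKey(), startKey))
          continue;
        process(row);
      }

      // A short page means the scan has reached the end of the ring.
      if (rows.size() < PAGE_SIZE)
        return;

      // Prepare the next page: start from the last key we saw.
      startKey = rows.get(rows.size() - 1).getKey();
      firstPage = false;
    }
  }

  private static void process(KeySlice row) {
    System.out.println(row);
  }
}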

            maybeInit();
            if (rows == null)
                return endOfData();
           
            totalRead++;
            // Take the next row from the current batch and index its columns by name
            KeySlice ks = rows.get(i++);
            SortedMap<ByteBuffer, IColumn> map = new TreeMap<ByteBuffer, IColumn>(comparator);
            for (ColumnOrSuperColumn cosc : ks.columns)
            {
                IColumn column = unthriftify(cosc);
                map.put(column.name(), column);
View Full Code Here
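unthriftify above converts each Thrift ColumnOrSuperColumn into Cassandra's internal IColumn before the columns are indexed by name. The same indexing step can be shown with Thrift types alone; the sketch below is only an illustration of that pattern, not the project's actual unthriftify.

import java.nio.ByteBuffer;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.KeySlice;

public final class KeySliceColumns {
  /** Indexes the regular columns of a KeySlice by name, using ByteBuffer's natural ordering. */
  static SortedMap<ByteBuffer, Column> columnsByName(KeySlice slice) {
    SortedMap<ByteBuffer, Column> map = new TreeMap<ByteBuffer, Column>();
    for (ColumnOrSuperColumn cosc : slice.getColumns()) {
      if (cosc.isSetColumn()) {
        Column c = cosc.getColumn();
        map.put(ByteBuffer.wrap(c.getName()), c);
      }
      // Super columns and counter columns are skipped in this sketch.
    }
    return map;
  }
}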

    unprefixed.start_key = ps.fromByteBuffer(prefixed.start_key);
    return unprefixed;
  }

  public KeySlice prefixKeySlice(KeySlice unprefixed) {
    KeySlice prefixed = new KeySlice();
    prefixed.key = ps.toByteBuffer(unprefixed.key);
    prefixed.columns = unprefixed.columns;
    return prefixed;
  }
View Full Code Here

    prefixed.columns = unprefixed.columns;
    return prefixed;
  }

  public KeySlice unprefixKeySlice(KeySlice prefixed) {
    KeySlice unprefixed = new KeySlice();
    // Convert the stored (prefixed) key back to its logical form
    unprefixed.key = ps.fromByteBuffer(prefixed.key);
    unprefixed.columns = prefixed.columns;
    return unprefixed;
  }
View Full Code Here
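The ps serializer used by prefixKeySlice/unprefixKeySlice is project-specific and not shown here. The round trip the two helpers implement can still be illustrated with a purely hypothetical fixed-prefix serializer; everything in this sketch (the class names, the "t1:" prefix) is an assumption for demonstration only.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;

import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.KeySlice;

/** Hypothetical stand-in for the project's prefix serializer: prepends/strips a fixed byte prefix. */
final class FixedPrefixSerializer {
  private final byte[] prefix;

  FixedPrefixSerializer(byte[] prefix) { this.prefix = prefix; }

  ByteBuffer toByteBuffer(ByteBuffer unprefixed) {
    ByteBuffer out = ByteBuffer.allocate(prefix.length + unprefixed.remaining());
    out.put(prefix).put(unprefixed.duplicate());
    out.flip();
    return out;
  }

  ByteBuffer fromByteBuffer(ByteBuffer prefixed) {
    ByteBuffer in = prefixed.duplicate();
    in.position(in.position() + prefix.length);   // skip the prefix bytes
    return in.slice();
  }
}

public class PrefixRoundTrip {
  public static void main(String[] args) {
    FixedPrefixSerializer ps = new FixedPrefixSerializer("t1:".getBytes(StandardCharsets.UTF_8));

    KeySlice original = new KeySlice();
    original.key = ByteBuffer.wrap("row-key".getBytes(StandardCharsets.UTF_8));
    original.columns = new ArrayList<ColumnOrSuperColumn>();

    // Same steps as prefixKeySlice above: prepend the prefix to the logical key.
    KeySlice prefixed = new KeySlice();
    prefixed.key = ps.toByteBuffer(original.key);
    prefixed.columns = original.columns;

    // Same steps as unprefixKeySlice above: strip the prefix back off.
    KeySlice unprefixed = new KeySlice();
    unprefixed.key = ps.fromByteBuffer(prefixed.key);
    unprefixed.columns = prefixed.columns;

    System.out.println(unprefixed.key.equals(original.key));   // true
  }
}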

                                    }
                                }, query.retry.duplicate()).getResult();

                        // Notify the callback
                        if (!ks.isEmpty()) {
                            KeySlice lastRow = Iterables.getLast(ks);
                            boolean bContinue = (ks.size() == getBlockSize());

                            if (getRepeatLastToken()) {
                                if (firstBlock) {
                                    firstBlock = false;
                                }
                                else {
                                    ks.remove(0);
                                }
                            }
                           
                            if (bIgnoreTombstones) {
                                Iterator<KeySlice> iter = ks.iterator();
                                while (iter.hasNext()) {
                                    if (iter.next().getColumnsSize() == 0)
                                        iter.remove();
                                }
                            }
                            Rows<K, C> rows = new ThriftRowsSliceImpl<K, C>(ks, columnFamily
                                    .getKeySerializer(), columnFamily.getColumnSerializer());
                            try {
                                callback.success(rows);
                            }
                            catch (Throwable t) {
                                ConnectionException ce = ThriftConverter.ToConnectionPoolException(t);
                                error.set(ce);
                                return false;
                            }
                           
                            if (bContinue) {
                                // Determine the start token for the next page
                                String token = partitioner.getTokenForKey(lastRow.bufferForKey()).toString();
                                checkpointManager.trackCheckpoint(tokenPair.left, token);
                                if (getRepeatLastToken()) {
                                    range.setStart_token(partitioner.getTokenMinusOne(token));
                                }
                                else {
View Full Code Here
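Both this reader and the row iterator in the next snippet treat a KeySlice that comes back with zero columns as a range tombstone ("ghost" row) and drop it before handing rows to the caller. That filter is small enough to show on its own; a minimal stand-alone version, assuming the caller owns the mutable list:

import java.util.Iterator;
import java.util.List;

import org.apache.cassandra.thrift.KeySlice;

public final class TombstoneFilter {
  /** Removes rows that only exist as tombstones, i.e. KeySlices that came back with no columns. */
  static void dropEmptyRows(List<KeySlice> rows) {
    Iterator<KeySlice> iter = rows.iterator();
    while (iter.hasNext()) {
      if (iter.next().getColumnsSize() == 0)
        iter.remove();
    }
  }
}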

                    iter = list.iterator();
                    if (iter == null || !iter.hasNext()) {
                        return false;
                    }

                    KeySlice previousLastRow = lastRow;
                    lastRow = Iterables.getLast(list);
                   
                    if (query.getRepeatLastToken() && previousLastRow != null) {
                        iter.next();
                        iter.remove();
                    }
                   
                    if (iter.hasNext() && bIgnoreTombstones) {
                        // Discard any tombstones
                        while (iter.hasNext()) {
                            KeySlice row = iter.next();
                            if (row.getColumns().isEmpty()) {
                                iter.remove();
                            }
                        }
                       
                        // Get the iterator again
                        iter = list.iterator();
                    }
                }
                return iter.hasNext();
            }

            @Override
            public Row<K, C> next() {
                org.apache.cassandra.thrift.KeySlice row = iter.next();
                return new ThriftRowImpl<K, C>(columnFamily.getKeySerializer().fromBytes(row.getKey()),
                        ByteBuffer.wrap(row.getKey()), new ThriftColumnOrSuperColumnListImpl<C>(row.getColumns(),
                                columnFamily.getColumnSerializer()));
            }

            @Override
            public void remove() {
View Full Code Here
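The ThriftRowImpl objects built above are returned to callers inside an Astyanax Rows<K, C> result. A hedged sketch of how client code typically walks that result, assuming String keys and String column names:

import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;

public final class RowsPrinter {
  /** Walks an Astyanax Rows result row by row, column by column. */
  static void print(Rows<String, String> rows) {
    for (Row<String, String> row : rows) {
      System.out.println("row key: " + row.getKey());
      for (Column<String> column : row.getColumns()) {
        System.out.println("  " + column.getName() + " = " + column.getStringValue());
      }
    }
  }
}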
