Package org.apache.hadoop.hbase.thrift2.generated

Examples of org.apache.hadoop.hbase.thrift2.generated.TAppend


    // Read back a row, then delete one column from it via deleteSingle
    TResult result = handler.get(table, get);
    assertEquals(2, result.getColumnValuesSize());

    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);
    // DELETE_COLUMN targets a single cell version; DELETE_COLUMNS would remove all versions
    delete.setDeleteType(TDeleteType.DELETE_COLUMN);

    handler.deleteSingle(table, delete);
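After deleteSingle returns, the test presumably re-reads the row to confirm the targeted column is gone. A minimal sketch, reusing the handler, table, and get fixtures above and assuming the row started with two columns:

    // one of the two original columns should remain after the delete
    result = handler.get(table, get);
    assertEquals(1, result.getColumnValuesSize());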



    // create a scan over the "testScan" rows, restricted to one column
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScan".getBytes());
    // the stop row is exclusive; appending \uffff makes the range cover every
    // row prefixed with "testScan"
    scan.setStopRow("testScan\uffff".getBytes());
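The snippet is cut off before the scan is executed. With the thrift2 handler interface, opening, draining, and closing the scanner would look roughly like this (a sketch of the usual pattern, not the test's exact code):

    // open a scanner for the scan, pull up to 10 rows, then release it
    int scannerId = handler.openScanner(table, scan);
    List<TResult> results = handler.getScannerRows(scannerId, 10);
    for (TResult tResult : results) {
      // each TResult carries the row key plus its list of TColumnValues
    }
    handler.closeScanner(scannerId);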


    // create a scan instance with a filter attached
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithFilter".getBytes());
    scan.setStopRow("testScanWithFilter\uffff".getBytes());
    // only return the key part of each cell, not the value
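The elided "key part" line most likely installs a filter through TScan's filterString field. A hedged sketch, assuming HBase's standard KeyOnlyFilter:

    // KeyOnlyFilter strips cell values, returning only row/column/timestamp
    scan.setFilterString(wrap("KeyOnlyFilter()".getBytes()));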

    handler.put(table, put);

    // create a scan over the rows written above, selecting the whole family
    // (no qualifier is set on the column)
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testScanWithBatchSize".getBytes());
    scan.setStopRow("testScanWithBatchSize\uffff".getBytes());
    // set batch size to 10 columns per call
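The truncated line presumably sets TScan's batchSize field, which caps how many columns each scanner call returns:

    // return at most 10 columns per scanner round trip
    scan.setBatchSize(10);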


    // create a scan instance; note there is a start row but no stop row
    TScan scan = new TScan();
    List<TColumn> columns = new ArrayList<TColumn>();
    TColumn column = new TColumn();
    column.setFamily(familyAname);
    column.setQualifier(qualifierAname);
    columns.add(column);
    scan.setColumns(columns);
    scan.setStartRow("testGetScannerResults".getBytes());

    // get 5 rows and check the returned results
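This test targets getScannerResults, the one-shot variant that scans and returns rows without an explicit scanner handle. The elided portion likely resembles:

    // fetch up to 5 rows in a single call; no scannerId to open or close
    List<TResult> results = handler.getScannerResults(table, scan, 5);
    assertEquals(5, results.size());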


    // Convert a client-side Delete's family/cell map into Thrift TColumns.
    // Map<family, List<Cell>>
    List<TColumn> columns = new ArrayList<TColumn>();
    for (Map.Entry<byte[], List<org.apache.hadoop.hbase.Cell>> familyEntry:
        in.getFamilyCellMap().entrySet()) {
      TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey()));
      for (org.apache.hadoop.hbase.Cell cell: familyEntry.getValue()) {
        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
        byte[] family = kv.getFamily();
        byte[] qualifier = kv.getQualifier();
        long timestamp = kv.getTimestamp();
        if (family != null) {
          column.setFamily(family);
        }
        if (qualifier != null) {
          column.setQualifier(qualifier);
        }
        // LATEST_TIMESTAMP means "unset", so only copy explicit timestamps
        if (timestamp != HConstants.LATEST_TIMESTAMP) {
          column.setTimestamp(timestamp);
        }
      }
      // note: one TColumn per family; if a family holds several cells, the
      // qualifier/timestamp of the last cell wins
      columns.add(column);
    }
    out.setColumns(columns);

    TPut putB = new TPut(wrap(rowName), columnValuesB);
    putB.setColumnValues(columnValuesB);

    TDelete delete = new TDelete(wrap(rowName));
    List<TColumn> deleteColumns = new ArrayList<TColumn>();
    TColumn deleteColumn = new TColumn(wrap(familyAname));
    deleteColumn.setQualifier(qualifierAname);
    deleteColumns.add(deleteColumn);
    delete.setColumns(deleteColumns);

    // TMutation is a union type: each instance wraps either a put or a delete
    List<TMutation> mutations = new ArrayList<TMutation>();
    TMutation mutationA = TMutation.put(putB);
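Since TMutation is a union, the truncated code presumably wraps the delete the same way and submits both mutations through mutateRow. A sketch under that assumption:

    // both mutations apply atomically to the same row
    TMutation mutationB = TMutation.deleteSingle(delete);
    mutations.add(mutationA);
    mutations.add(mutationB);
    TRowMutations tRowMutations = new TRowMutations(wrap(rowName), mutations);
    handler.mutateRow(table, tRowMutations);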

    // Write an initial cell, then increment a counter column on the same row
    TPut put = new TPut(wrap(rowName), columnValues);
    put.setColumnValues(columnValues);
    handler.put(table, put);

    // TColumnIncrement defaults to an increment amount of 1
    List<TColumnIncrement> incrementColumns = new ArrayList<TColumnIncrement>();
    incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));
    TIncrement increment = new TIncrement(wrap(rowName), incrementColumns);
    handler.increment(table, increment);

    TGet get = new TGet(wrap(rowName));
    TResult result = handler.get(table, get);
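None of the snippets above actually constructs TAppend, the class this page documents. It is used much like TIncrement, except its columns carry byte values to concatenate onto existing cells rather than numeric deltas. A minimal sketch with the same test fixtures:

    // append valueAname to whatever is stored at familyAname:qualifierAname
    List<TColumnValue> appendColumns = new ArrayList<TColumnValue>();
    appendColumns.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));
    TAppend append = new TAppend(wrap(rowName), appendColumns);
    TResult appendResult = handler.append(table, append);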

    byte[] rowName = "testDurability".getBytes();
    List<TColumnValue> columnValues = new ArrayList<TColumnValue>();
    columnValues.add(new TColumnValue(wrap(familyAname), wrap(qualifierAname), wrap(valueAname)));

    List<TColumnIncrement> incrementColumns = new ArrayList<TColumnIncrement>();
    incrementColumns.add(new TColumnIncrement(wrap(familyAname), wrap(qualifierAname)));

    TDelete tDelete = new TDelete(wrap(rowName));

    // if writeToWal is not set, check that the default value is used
    Delete delete = deleteFromThrift(tDelete);
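Given the comment, the next assertion presumably checks the default durability on the converted Delete. Assuming the Durability-era client API, it would read something like:

    // with no durability set on the TDelete, conversion should fall back to the default
    assertEquals(Durability.USE_DEFAULT, delete.getDurability());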

    // (row and raw come from earlier in the method, presumably
    // byte[] row = in.getRow() and Cell[] raw = in.rawCells())
    if (row != null) {
      out.setRow(in.getRow());
    }
    // copy every cell into a Thrift TColumnValue, cloning the backing arrays
    List<TColumnValue> columnValues = new ArrayList<TColumnValue>();
    for (Cell kv : raw) {
      TColumnValue col = new TColumnValue();
      col.setFamily(CellUtil.cloneFamily(kv));
      col.setQualifier(CellUtil.cloneQualifier(kv));
      col.setTimestamp(kv.getTimestamp());
      col.setValue(CellUtil.cloneValue(kv));
      columnValues.add(col);
    }
    out.setColumnValues(columnValues);
    return out;
  }
