Examples of Append


Examples of org.apache.hadoop.hbase.client.Append

    @Override
    public void run() {
      int count = 0;
      while (count < appendCounter) {
        Append app = new Append(appendRow);
        app.add(family, qualifier, CHAR);
        count++;
        try {
          region.append(app);
        } catch (IOException e) {
          e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

      public Object run() throws Exception {
        byte[] row = TEST_ROW;
        byte[] qualifier = TEST_QUALIFIER;
        Put put = new Put(row);
        put.add(TEST_FAMILY, qualifier, Bytes.toBytes(1));
        Append append = new Append(row);
        append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2));
        HTable t = new HTable(conf, TEST_TABLE.getTableName());
        try {
          t.put(put);
          t.append(append);
        } finally {
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

      table.put(put);
      Get get = new Get(row1);
      get.setAuthorizations(new Authorizations(SECRET));
      Result result = table.get(get);
      assertTrue(result.isEmpty());
      Append append = new Append(row1);
      append.add(fam, qual, Bytes.toBytes("b"));
      table.append(append);
      result = table.get(get);
      assertTrue(result.isEmpty());
      append = new Append(row1);
      append.add(fam, qual, Bytes.toBytes("c"));
      append.setCellVisibility(new CellVisibility(SECRET));
      table.append(append);
      result = table.get(get);
      assertTrue(!result.isEmpty());
    } finally {
      if (table != null) {
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

        byte[][] columnFamilies = dataGenerator.getColumnFamilies();
        while ((rowKeyBase = getNextKeyToUpdate()) < endKey) {
          if (RandomUtils.nextInt(100) < updatePercent) {
            byte[] rowKey = dataGenerator.getDeterministicUniqueKey(rowKeyBase);
            Increment inc = new Increment(rowKey);
            Append app = new Append(rowKey);
            numKeys.addAndGet(1);
            int columnCount = 0;
            for (byte[] cf : columnFamilies) {
              long cfHash = Arrays.hashCode(cf);
              inc.addColumn(cf, INCREMENT, cfHash);
              buf.setLength(0); // Clear the buffer
              buf.append("#").append(Bytes.toString(INCREMENT));
              buf.append(":").append(MutationType.INCREMENT.getNumber());
              app.add(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
              ++columnCount;
              if (!isBatchUpdate) {
                mutate(table, inc, rowKeyBase);
                numCols.addAndGet(1);
                inc = new Increment(rowKey);
                mutate(table, app, rowKeyBase);
                numCols.addAndGet(1);
                app = new Append(rowKey);
              }
              Get get = new Get(rowKey);
              get.addFamily(cf);
              try {
                get = dataGenerator.beforeGet(rowKeyBase, get);
              } catch (Exception e) {
                // Ideally wont happen
                LOG.warn("Failed to modify the get from the load generator  = [" + get.getRow()
                    + "], column family = [" + Bytes.toString(cf) + "]", e);
              }
              Result result = getRow(get, rowKeyBase, cf);
              Map<byte[], byte[]> columnValues =
                result != null ? result.getFamilyMap(cf) : null;
              if (columnValues == null) {
                int specialPermCellInsertionFactor = Integer.parseInt(dataGenerator.getArgs()[1]);
                if (((int) rowKeyBase % specialPermCellInsertionFactor == 0)) {
                  LOG.info("Null result expected for the rowkey " + Bytes.toString(rowKey));
                } else {
                  failedKeySet.add(rowKeyBase);
                  LOG.error("Failed to update the row with key = [" + rowKey
                      + "], since we could not get the original row");
                }
              }
              if(columnValues != null) {
                for (byte[] column : columnValues.keySet()) {
                  if (Bytes.equals(column, INCREMENT) || Bytes.equals(column, MUTATE_INFO)) {
                    continue;
                  }
                  MutationType mt = MutationType
                      .valueOf(RandomUtils.nextInt(MutationType.values().length));
                  long columnHash = Arrays.hashCode(column);
                  long hashCode = cfHash + columnHash;
                  byte[] hashCodeBytes = Bytes.toBytes(hashCode);
                  byte[] checkedValue = HConstants.EMPTY_BYTE_ARRAY;
                  if (hashCode % 2 == 0) {
                    Cell kv = result.getColumnLatestCell(cf, column);
                    checkedValue = kv != null ? CellUtil.cloneValue(kv) : null;
                    Preconditions.checkNotNull(checkedValue,
                        "Column value to be checked should not be null");
                  }
                  buf.setLength(0); // Clear the buffer
                  buf.append("#").append(Bytes.toString(column)).append(":");
                  ++columnCount;
                  switch (mt) {
                  case PUT:
                    Put put = new Put(rowKey);
                    put.add(cf, column, hashCodeBytes);
                    mutate(table, put, rowKeyBase, rowKey, cf, column, checkedValue);
                    buf.append(MutationType.PUT.getNumber());
                    break;
                  case DELETE:
                    Delete delete = new Delete(rowKey);
                    // Delete all versions since a put
                    // could be called multiple times if CM is used
                    delete.deleteColumns(cf, column);
                    mutate(table, delete, rowKeyBase, rowKey, cf, column, checkedValue);
                    buf.append(MutationType.DELETE.getNumber());
                    break;
                  default:
                    buf.append(MutationType.APPEND.getNumber());
                    app.add(cf, column, hashCodeBytes);
                  }
                  app.add(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
                  if (!isBatchUpdate) {
                    mutate(table, app, rowKeyBase);
                    numCols.addAndGet(1);
                    app = new Append(rowKey);
                  }
                }
              }
            }
            if (isBatchUpdate) {
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    }
    requestCount.incrementAndGet();
    try {
      HRegion region = getRegion(regionName);
      Integer lock = getLockFromId(append.getLockId());
      Append appVal = append;
      Result resVal;
      if (region.getCoprocessorHost() != null) {
        resVal = region.getCoprocessorHost().preAppend(appVal);
        if (resVal != null) {
          return resVal;
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

  public void testAppendWithReadOnlyTable() throws Exception {
    byte[] TABLE = Bytes.toBytes("readOnlyTable");
    this.region = initHRegion(TABLE, getName(), conf, true, Bytes.toBytes("somefamily"));
    boolean exceptionCaught = false;
    Append append = new Append(Bytes.toBytes("somerow"));
    append.add(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
        Bytes.toBytes("somevalue"));
    try {
      region.append(append, false);
    } catch (IOException e) {
      exceptionCaught = true;
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    }
    requestCount.incrementAndGet();
    try {
      HRegion region = getRegion(regionName);
      Integer lock = getLockFromId(append.getLockId());
      Append appVal = append;
      Result resVal;
      if (region.getCoprocessorHost() != null) {
        resVal = region.getCoprocessorHost().preAppend(appVal);
        if (resVal != null) {
          return resVal;
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    @Override
    public void run() {
      int count = 0;
      while (count < appendCounter) {
        Append app = new Append(appendRow);
        app.add(family, qualifier, CHAR);
        count++;
        try {
          region.append(app, null, true);
        } catch (IOException e) {
          e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

      public Object run() throws Exception {
        byte[] row = Bytes.toBytes("random_row");
        byte[] qualifier = Bytes.toBytes("q");
        Put put = new Put(row);
        put.add(TEST_FAMILY, qualifier, Bytes.toBytes(1));
        Append append = new Append(row);
        append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2));
        HTable t = new HTable(conf, TEST_TABLE);
        try {
          t.put(put);
          t.append(append);
        } finally {
View Full Code Here

Examples of org.apache.hadoop.hbase.client.Append

    }
    requestCount.incrementAndGet();
    try {
      HRegion region = getRegion(regionName);
      Integer lock = getLockFromId(append.getLockId());
      Append appVal = append;
      Result resVal;
      if (region.getCoprocessorHost() != null) {
        resVal = region.getCoprocessorHost().preAppend(appVal);
        if (resVal != null) {
          return resVal;
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.