Examples of PageFilter


Examples of org.apache.hadoop.hbase.filter.PageFilter

   
    // Grab all 6 rows (colsPerRow is 6; verifyScan and verifyScanFull are
    // helper assertions from the surrounding test class)
    long expectedRows = 6;
    long expectedKeys = colsPerRow;
    Scan s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, expectedKVs);
   
    // Grab first 4 rows (6 cols per row)
    expectedRows = 4;
    expectedKeys = colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 24)); // 4 rows x 6 cols = 24 KVs
   
    // Grab first 2 rows
    expectedRows = 2;
    expectedKeys = colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 12)); // 2 rows x 6 cols = 12 KVs

    // Grab first row
    expectedRows = 1;
    expectedKeys = colsPerRow;
    s = new Scan();
    s.setFilter(new PageFilter(expectedRows));
    verifyScan(s, expectedRows, expectedKeys);
    s.setFilter(new PageFilter(expectedRows));
    verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));  // 1 row x 6 cols = 6 KVs
  }
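
PageFilter enforces its limit independently on each region server, so a scan that spans regions can return more rows than requested; client-side paging therefore restarts the scan just past the last row of the previous page. Below is a minimal sketch of that idiom, assuming an open HTable handle and the usual org.apache.hadoop.hbase.client, filter, and util imports (names are illustrative):

    // Client-side paging sketch: POSTFIX is a single zero byte appended to the
    // last row key so the next scan starts strictly after the previous page.
    void scanAllPages(HTable table) throws IOException {
      final byte[] POSTFIX = new byte[] { 0x00 };
      final long pageSize = 10;
      byte[] lastRow = null;
      while (true) {
        Scan scan = new Scan();
        scan.setFilter(new PageFilter(pageSize));
        if (lastRow != null) {
          scan.setStartRow(Bytes.add(lastRow, POSTFIX)); // resume after last page
        }
        ResultScanner scanner = table.getScanner(scan);
        int rowsInPage = 0;
        for (Result result : scanner) {
          rowsInPage++;
          lastRow = result.getRow();
        }
        scanner.close();
        if (rowsInPage == 0) break; // an empty page means no rows are left
      }
    }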

Examples of org.apache.hadoop.hbase.filter.PageFilter

    @Override
    void testRow(final int i) throws IOException {
      Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
      scan.setFilter(new WhileMatchFilter(new PageFilter(120)));
      ResultScanner s = this.table.getScanner(scan);
      // Drain the scanner; WhileMatchFilter/PageFilter caps how many rows come back.
      for (Result rr; (rr = s.next()) != null;) {
        // rows are intentionally ignored in this performance test
      }
      s.close();
    }
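
WhileMatchFilter ends the scan as soon as its wrapped filter rejects a row, so the 120-row PageFilter above stops the whole scan early instead of being re-applied region by region. A standalone sketch of the same pattern, assuming an open table handle (the start row is illustrative):

    // Read at most ~120 rows starting from an arbitrary key, then stop scanning.
    Scan scan = new Scan(Bytes.toBytes("row-00042"));           // illustrative start row
    scan.setFilter(new WhileMatchFilter(new PageFilter(120)));  // halt the scan at the cap
    ResultScanner scanner = table.getScanner(scan);
    try {
      for (Result r : scanner) {
        // process r
      }
    } finally {
      scanner.close();
    }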

Examples of org.apache.hadoop.hbase.filter.PageFilter

        // Excerpt from Phoenix's explain-plan generation: describe the scan's
        // server-side filters, including any PageFilter row limit.
        planSteps.add(buf.toString());

        Scan scan = context.getScan();
        Filter filter = scan.getFilter();
        PageFilter pageFilter = null;
        if (filter != null) {
            int offset = 0;
            boolean hasFirstKeyOnlyFilter = false;
            String filterDesc = "";
            if (hasSkipScanFilter) {
                if (filter instanceof FilterList) {
                    List<Filter> filterList = ((FilterList) filter).getFilters();
                    if (filterList.get(0) instanceof FirstKeyOnlyFilter) {
                        hasFirstKeyOnlyFilter = true;
                        offset = 1;
                    }
                    if (filterList.size() > offset+1) {
                        filterDesc = filterList.get(offset+1).toString();
                        pageFilter = getPageFilter(filterList);
                    }
                }
            } else if (filter instanceof FilterList) {
                List<Filter> filterList = ((FilterList) filter).getFilters();
                if (filterList.get(0) instanceof FirstKeyOnlyFilter) {
                    hasFirstKeyOnlyFilter = true;
                    offset = 1;
                }
                if (filterList.size() > offset) {
                    filterDesc = filterList.get(offset).toString();
                    pageFilter = getPageFilter(filterList);
                }
            } else {
                if (filter instanceof FirstKeyOnlyFilter) {
                    hasFirstKeyOnlyFilter = true;
                } else {
                    filterDesc = filter.toString();
                }
            }
            if (filterDesc.length() > 0) {
                planSteps.add("    SERVER FILTER BY " + (hasFirstKeyOnlyFilter ? "FIRST KEY ONLY AND " : "") + filterDesc);
            } else if (hasFirstKeyOnlyFilter) {
                planSteps.add("    SERVER FILTER BY FIRST KEY ONLY");
            }
            if (pageFilter != null) {
                planSteps.add("    SERVER " + pageFilter.getPageSize() + " ROW LIMIT");
            }
        }
        Integer groupByLimit = null;
        byte[] groupByLimitBytes = scan.getAttribute(BaseScannerRegionObserver.GROUP_BY_LIMIT);
        if (groupByLimitBytes != null) {
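
For a scan carrying, say, a 120-row PageFilter, the code above contributes a plan step like the following (illustrative; the surrounding steps depend on the query):

        SERVER 120 ROW LIMIT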

Examples of org.apache.hadoop.hbase.filter.PageFilter

                for (PColumnFamily family : table.getColumnFamilies()) {
                    scan.addFamily(family.getName().getBytes());
                }
        } // TODO adding all CFs here is not correct. It should be done only after ColumnProjectionOptimization.
        if (limit != null) {
            ScanUtil.andFilterAtEnd(scan, new PageFilter(limit));
        }

        doColumnProjectionOptimization(context, scan, table, statement);
    }
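
ScanUtil.andFilterAtEnd is a Phoenix helper; conceptually it ANDs the PageFilter onto whatever filter the scan already carries. A hand-rolled sketch of that effect (an assumption about the helper's behavior, not Phoenix's actual code):

    // Hypothetical equivalent: AND an extra filter onto a scan's existing filter.
    static void andFilterAtEnd(Scan scan, Filter extra) {
      Filter current = scan.getFilter();
      if (current == null) {
        scan.setFilter(extra);
      } else if (current instanceof FilterList
          && ((FilterList) current).getOperator() == FilterList.Operator.MUST_PASS_ALL) {
        ((FilterList) current).addFilter(extra);  // append to the existing AND list
      } else {
        scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, current, extra));
      }
    }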

Examples of org.apache.hadoop.hbase.filter.PageFilter

            if (endpoint.getFilters() != null) {
                filters.addAll(endpoint.getFilters());
            }

            if (maxMessagesPerPoll > 0) {
                filters.add(new PageFilter(maxMessagesPerPoll));
            }
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);

            if (rowModel != null && rowModel.getCells() != null) {
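
FilterList defaults to Operator.MUST_PASS_ALL, so the PageFilter above is ANDed with the endpoint's own filters: a row must pass every filter, and each poll is capped at maxMessagesPerPoll rows per region server. A condensed sketch of the same composition (the first filter is an illustrative stand-in for the endpoint's filters):

    // Compose user filters with a row-count cap; a row must pass all of them.
    List<Filter> filters = new ArrayList<Filter>();
    filters.add(new KeyOnlyFilter());        // stand-in for an endpoint-supplied filter
    filters.add(new PageFilter(100));        // cap rows returned per poll
    Scan scan = new Scan();
    scan.setFilter(new FilterList(filters)); // MUST_PASS_ALL by default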