Package org.apache.hadoop.hbase.filter

Examples of org.apache.hadoop.hbase.filter.Filter


        if (scanner != null) scanner.close();
      }
    }

    protected Scan constructScan(byte[] valuePrefix) throws IOException {
      Filter filter = new SingleColumnValueFilter(
          FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL,
          new BinaryComparator(valuePrefix)
      );
      Scan scan = new Scan();
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
View Full Code Here
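
The excerpt above is cut off before the filter is attached to the Scan. The same pattern, scanning a single column and keeping only rows whose value equals a given byte array, can be written as a complete program. The sketch below is an illustration only: the table name "testtable", the family "cf", the qualifier "q" and the expected value are hypothetical, and it assumes the pre-1.0 HTable-style client API used throughout these snippets.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.filter.CompareFilter;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FilteredScanExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");   // hypothetical table name
        byte[] family = Bytes.toBytes("cf");            // hypothetical column family
        byte[] qualifier = Bytes.toBytes("q");          // hypothetical qualifier

        // Keep only rows whose cf:q value is exactly "expected".
        Filter filter = new SingleColumnValueFilter(
            family, qualifier, CompareFilter.CompareOp.EQUAL,
            new BinaryComparator(Bytes.toBytes("expected")));

        Scan scan = new Scan();
        scan.addColumn(family, qualifier);
        scan.setFilter(filter);

        ResultScanner scanner = table.getScanner(scan);
        try {
          for (Result result : scanner) {
            System.out.println(Bytes.toString(result.getRow()));
          }
        } finally {
          scanner.close();   // always release the scanner, as in the excerpt above
          table.close();
        }
      }
    }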


    // Make sure key is of some substance... non-null and > than first key.
    assertTrue(key != null && key.length > 0 &&
      Bytes.BYTES_COMPARATOR.compare(key, new byte[] {'a', 'a', 'a'}) >= 0);
    LOG.info("Key=" + Bytes.toString(key));
    Scan s = startRow == null ? new Scan() : new Scan(startRow);
    Filter f = new RowFilter(op, new BinaryComparator(key));
    f = new WhileMatchFilter(f);
    s.setFilter(f);
    return s;
  }
View Full Code Here
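
Wrapping the RowFilter in a WhileMatchFilter, as above, changes its effect from skipping non-matching rows to ending the scan at the first row the inner filter rejects. A minimal sketch of the same idiom with a hypothetical stop key, reusing the imports from the previous sketch plus org.apache.hadoop.hbase.filter.RowFilter and org.apache.hadoop.hbase.filter.WhileMatchFilter:

    // Keep scanning while the row key is < "stoprow" (hypothetical key);
    // WhileMatchFilter ends the whole scan as soon as the wrapped RowFilter
    // rejects a row, instead of merely filtering that row out.
    Scan bounded = new Scan();
    bounded.setFilter(new WhileMatchFilter(
        new RowFilter(CompareFilter.CompareOp.LESS,
            new BinaryComparator(Bytes.toBytes("stoprow")))));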

      put.add(FAMILY, QUALIFIERS[i], VALUE);
      ht.put(put);
    }
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    Filter filter = new QualifierFilter(CompareOp.EQUAL,
      new RegexStringComparator("col[1-5]"));
    scan.setFilter(filter);
    ResultScanner scanner = ht.getScanner(scan);
    int expectedIndex = 1;
    // Iterate the scanner opened above rather than opening a second, leaked scanner.
    for (Result result : scanner) {
View Full Code Here
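
With a RegexStringComparator, CompareOp.EQUAL means "matches the pattern", so the QualifierFilter above returns only the columns col1 through col5. The same comparator can be paired with other comparison filters; the sketch below applies it to cell values with a ValueFilter, using a hypothetical pattern and reusing the earlier imports plus org.apache.hadoop.hbase.filter.ValueFilter and org.apache.hadoop.hbase.filter.RegexStringComparator:

    // Keep only cells whose value matches the hypothetical pattern "^user-\\d+$".
    Filter valueMatches = new ValueFilter(CompareFilter.CompareOp.EQUAL,
        new RegexStringComparator("^user-\\d+$"));
    Scan regexScan = new Scan();
    regexScan.setFilter(valueMatches);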

      put.add(FAMILY, QUALIFIERS[i], VALUE);
      ht.put(put);
    }
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    Filter filter = new KeyOnlyFilter(true);
    scan.setFilter(filter);
    ResultScanner scanner = ht.getScanner(scan);
    int count = 0;
    // Iterate the scanner opened above rather than opening a second, leaked scanner.
    for (Result result : scanner) {
      assertEquals(1, result.size());
View Full Code Here
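
Passing true to KeyOnlyFilter (lenAsVal) makes each returned cell carry, instead of its value, a 4-byte integer holding the original value's length, so the scan above transfers keys and sizes but no data; the no-argument form drops the value entirely. A minimal sketch reusing FAMILY from the snippet above:

    // Fetch row keys only; each value is replaced by its original length
    // because lenAsVal is true.
    Scan keysOnly = new Scan();
    keysOnly.addFamily(FAMILY);
    keysOnly.setFilter(new KeyOnlyFilter(true));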

  public void testMaxWithFilter() throws Throwable {
    BigDecimal max = BigDecimal.ZERO;
    AggregationClient aClient = new AggregationClient(conf);
    Scan scan = new Scan();
    scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
    scan.setFilter(f);
    final ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    max = aClient.max(TEST_TABLE, ci, scan);
    assertEquals(null, max);
  }
View Full Code Here

  @Test
  public void testMinWithFilter() throws Throwable {
    AggregationClient aClient = new AggregationClient(conf);
    Scan scan = new Scan();
    scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
    scan.setFilter(f);
    final ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    BigDecimal min = null;
    min = aClient.min(TEST_TABLE, ci, scan);
    assertEquals(null, min);
View Full Code Here

  }

  @Test
  public void testSumWithFilter() throws Throwable {
    AggregationClient aClient = new AggregationClient(conf);
    Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
    Scan scan = new Scan();
    scan.addFamily(TEST_FAMILY);
    scan.setFilter(f);
    final ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    BigDecimal sum = null;
View Full Code Here

  @Test
  public void testAvgWithFilter() throws Throwable {
    AggregationClient aClient = new AggregationClient(conf);
    Scan scan = new Scan();
    scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
    scan.setFilter(f);
    final ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    Double avg = null;
    avg = aClient.avg(TEST_TABLE, ci, scan);
    assertEquals(Double.NaN, avg, 0);
View Full Code Here

  }

  @Test
  public void testStdWithFilter() throws Throwable {
    AggregationClient aClient = new AggregationClient(conf);
    Filter f = new PrefixFilter(Bytes.toBytes("foo:bar"));
    Scan scan = new Scan();
    scan.addFamily(TEST_FAMILY);
    scan.setFilter(f);
    final ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    Double std = null;
View Full Code Here
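
Every aggregation test above attaches a PrefixFilter whose prefix "foo:bar" matches no row keys, which is why max, min and sum come back null and avg is NaN. With a prefix that does match, the same AggregationClient calls return real values. The sketch below assumes the tests' conf, TEST_TABLE, TEST_FAMILY and TEST_QUALIFIER are in scope and that the table's row keys start with "row" (a hypothetical prefix):

    // Aggregate only over rows whose key begins with "row".
    AggregationClient aggregationClient = new AggregationClient(conf);
    Scan prefixScan = new Scan();
    prefixScan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    prefixScan.setFilter(new PrefixFilter(Bytes.toBytes("row")));
    ColumnInterpreter<BigDecimal, BigDecimal> ci = new BigDecimalColumnInterpreter();
    BigDecimal max = aggregationClient.max(TEST_TABLE, ci, prefixScan);  // non-null once rows match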

    EqualsExpression equalsExpression = new EqualsExpression(column, value);
    singleIndexExpression.addEqualsExpression(equalsExpression);

    Scan scan = new Scan();
    scan.setAttribute(Constants.INDEX_EXPRESSION, IndexUtils.toBytes(singleIndexExpression));
    Filter filter = new SingleColumnValueFilter(FAMILY1, QUALIFIER1, CompareOp.EQUAL, value);
    scan.setFilter(filter);
    ScanFilterEvaluator evaluator = new ScanFilterEvaluator();
    List<IndexSpecification> indices = new ArrayList<IndexSpecification>();
    IndexSpecification index = new IndexSpecification(indexName);
    HColumnDescriptor colDesc = new HColumnDescriptor(FAMILY1);
View Full Code Here
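
The SingleColumnValueFilter above uses the constructor overload that takes the expected value as a raw byte[]; internally this is equivalent to passing a BinaryComparator, as in the first snippet on this page. A sketch of the equivalence, assuming the same FAMILY1, QUALIFIER1 and value:

    // Both filters select the same cells: the byte[] overload simply wraps
    // the value in a BinaryComparator.
    Filter direct = new SingleColumnValueFilter(FAMILY1, QUALIFIER1, CompareOp.EQUAL, value);
    Filter explicit = new SingleColumnValueFilter(FAMILY1, QUALIFIER1, CompareOp.EQUAL,
        new BinaryComparator(value));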
