Package org.apache.hadoop.hbase.filter

Examples of org.apache.hadoop.hbase.filter.PrefixFilter
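
A PrefixFilter passes only those rows whose row key begins with the byte prefix given to its constructor. Because the filter by itself does not reposition the scan, it is usually paired with a matching start row so the scan does not begin at the first row of the table. A minimal, self-contained sketch (the table name "t1", column family "cf", and prefix "row-" are placeholders, and the pre-1.0 HTable client API is assumed to match the excerpts below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixFilterExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "t1");        // placeholder table name
    byte[] prefix = Bytes.toBytes("row-");        // placeholder row-key prefix

    Scan scan = new Scan();
    scan.setStartRow(prefix);                     // start at the prefix, not at the table start
    scan.setFilter(new PrefixFilter(prefix));     // pass only rows whose key begins with the prefix
    scan.addFamily(Bytes.toBytes("cf"));          // placeholder column family

    ResultScanner scanner = table.getScanner(scan);
    try {
      for (Result result : scanner) {
        System.out.println(Bytes.toString(result.getRow()));
      }
    } finally {
      scanner.close();
      table.close();
    }
  }
}

The project excerpts below show the same filter combined with FilterList, SingleColumnValueFilter, WhileMatchFilter, and meta-table scans.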


  private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
      throws IOException {
    // Defaults to FilterList.Operator.MUST_PASS_ALL.
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    // Only return rows where trans-tags:qual2 equals the given value.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
        Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
    // Drop rows that do not contain the column at all.
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
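
The excerpt stops before the filter list is attached to a scan. A hedged sketch of how such a method typically continues (not the original code; it assumes the HRegion parameter r from the signature above and default Scan settings):

    // Sketch only, not the original method body: attach the combined filters to a Scan
    // and open a scanner on the region (RegionScanner implements InternalScanner).
    Scan scan = new Scan();
    scan.setFilter(allFilters);
    return r.getScanner(scan);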


    arr.add("universe");
    obj = doType(conf, arr, ArrayList.class);
    assertTrue(obj instanceof ArrayList);
    Assert.assertArrayEquals(arr.toArray(), ((ArrayList) obj).toArray());
    // Check that filters can be serialized
    obj = doType(conf, new PrefixFilter(HConstants.EMPTY_BYTE_ARRAY),
      PrefixFilter.class);
    assertTrue(obj instanceof PrefixFilter);
  }

    }
    scan.addFamily(FConstants.CATALOG_FAMILY);
    scan.setCaching(rows);
    // Restrict the scan to the given table's rows only when a table name was supplied.
    if (tableName != null && tableName.length > 0) {
      FilterList allFilters = new FilterList();
      allFilters.addFilter(new PrefixFilter(tableName));
      scan.setFilter(allFilters);
    }
    int processedRows = 0;
    try {
      ResultScanner scanner = fmetaTable.getScanner(scan);

    scan.addFamily(FConstants.CATALOG_FAMILY);
    int rows = getConf().getInt(HConstants.HBASE_META_SCANNER_CACHING,
        HConstants.DEFAULT_HBASE_META_SCANNER_CACHING);
    scan.setCaching(rows);
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(FConstants.TABLEROW_PREFIX));
    scan.setFilter(allFilters);
    HTableInterface htable = null;
    try {
      htable = getHTable();
      ResultScanner scanner = htable.getScanner(scan);

  public List<EntityGroupInfo> getTableEntityGroups(final byte[] tableByte)
      throws MetaException {
    final List<EntityGroupInfo> entityGroupInfos = new LinkedList<EntityGroupInfo>();
    final byte[] startKey = tableByte;
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(tableByte));

    FMetaVisitor visitor = new FMetaVisitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        if (r == null || r.isEmpty()) {

  public List<EntityGroupLocation> getEntityGroupLocations(
      final byte[] tableName) throws MetaException {
    final List<EntityGroupLocation> egLocations = new LinkedList<EntityGroupLocation>();
    final byte[] startKey = tableName;
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(tableName));

    FMetaVisitor visitor = new FMetaVisitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        if (r == null || r.isEmpty()) {

  public List<Pair<EntityGroupInfo, ServerName>> getTableEntityGroupsAndLocations(
      final byte[] tableName, final boolean excludeOfflinedSplitParents)
      throws MetaException {
    byte[] startrow = tableName;
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(tableName));

    final List<Pair<EntityGroupInfo, ServerName>> entityGroupInfos = new ArrayList<Pair<EntityGroupInfo, ServerName>>();
    FMetaVisitor visitor = new FMetaVisitor() {
      @Override
      public boolean visit(Result r) throws IOException {

        byte[][] columnsArray = columns.toArray(new byte[0][]);
        Scan scan = new Scan(startAndPrefix);
        scan.addColumns(columnsArray);
        Filter f = new WhileMatchFilter(
            new PrefixFilter(startAndPrefix));
        scan.setFilter(f);
        return addScanner(table.getScanner(scan));
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
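
The Thrift handler above shows a common PrefixFilter idiom: the prefix doubles as the scan's start row, and the filter is wrapped in a WhileMatchFilter so the scan terminates at the first row key that no longer matches instead of filtering every remaining row to the end of the table. A condensed sketch of that pattern (the prefix is a placeholder and "table" is assumed to be an already-open HTable):

    // Sketch: begin the scan at the prefix and stop at the first non-matching row key.
    byte[] prefix = Bytes.toBytes("row-");                        // placeholder prefix
    Scan scan = new Scan(prefix);                                 // start row = prefix
    scan.setFilter(new WhileMatchFilter(new PrefixFilter(prefix)));
    ResultScanner scanner = table.getScanner(scan);               // "table": assumed open HTable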

      // Test filters are serialized properly.
      scan = new Scan(startRow);
      final String name = "testScan";
      byte [] prefix = Bytes.toBytes(name);
      scan.setFilter(new PrefixFilter(prefix));
      scanProto = ProtobufUtil.toScan(scan);
      desScan = ProtobufUtil.toScan(scanProto);
      Filter f = desScan.getFilter();
      assertTrue(f instanceof PrefixFilter);
    }

  private ResultScanner buildScanner(String keyPrefix, String value, HTable ht)
      throws IOException {
    // OurFilterList allFilters = new OurFilterList();
    FilterList allFilters = new FilterList(/* FilterList.Operator.MUST_PASS_ALL */);
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes
        .toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes
        .toBytes(value));
    // Drop rows that do not contain trans-tags:qual2 at all.
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
