Examples of FilterList


Examples of org.apache.hadoop.hbase.filter.FilterList

      put.add(family, qual1, 2L, Bytes.toBytes(2L));
      region.put(put);

      Scan idxScan = new Scan();
      idxScan.addFamily(family);
      // AND two SingleColumnValueFilters together so only rows whose
      // qual1 value lies in the closed range [0, 3] survive.
      idxScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL,
          Arrays.<Filter>asList(
              new SingleColumnValueFilter(family, qual1, CompareOp.GREATER_OR_EQUAL,
                  new BinaryComparator(Bytes.toBytes(0L))),
              new SingleColumnValueFilter(family, qual1, CompareOp.LESS_OR_EQUAL,
                  new BinaryComparator(Bytes.toBytes(3L))))));
      InternalScanner scanner = region.getScanner(idxScan);
      List<Cell> res = new ArrayList<Cell>();
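
MUST_PASS_ALL ANDs the member filters, which is what turns the two bounds above into a range check. Swapping in FilterList.Operator.MUST_PASS_ONE ORs them instead; a minimal sketch, reusing the family and qual1 from the snippet above, that keeps rows whose value falls outside [0, 3]:

      Scan orScan = new Scan();
      orScan.addFamily(family);
      // MUST_PASS_ONE: a row passes if either bound-check accepts it.
      orScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE,
          Arrays.<Filter>asList(
              new SingleColumnValueFilter(family, qual1, CompareOp.LESS,
                  new BinaryComparator(Bytes.toBytes(0L))),
              new SingleColumnValueFilter(family, qual1, CompareOp.GREATER,
                  new BinaryComparator(Bytes.toBytes(3L))))));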

Examples of org.apache.hadoop.hbase.filter.FilterList

    }

    protected Scan constructScan(byte[] valuePrefix) throws IOException {
      Scan scan = new Scan();
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
      FilterList list = new FilterList(); // defaults to MUST_PASS_ALL
      // BinaryComparator with EQUAL matches the whole cell value against
      // valuePrefix exactly; it is not a prefix match.
      list.addFilter(new SingleColumnValueFilter(
          FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL,
          new BinaryComparator(valuePrefix)
      ));
      if (this.filterAll) {
        list.addFilter(new FilterAllFilter());
      }
      scan.setFilter(list);
      return scan;
    }
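
A Scan built by constructScan is then handed to the table's getScanner; a minimal usage sketch, assuming an open table handle named table and a hypothetical value "value-0":

      ResultScanner scanner = table.getScanner(constructScan(Bytes.toBytes("value-0")));
      try {
        for (Result result : scanner) {
          // only rows whose FAMILY_NAME:QUALIFIER_NAME cell equals value-0 arrive here
        }
      } finally {
        scanner.close();
      }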

Examples of org.apache.hadoop.hbase.filter.FilterList

  }

  private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
  throws IOException {
    // Defaults to FilterList.Operator.MUST_PASS_ALL.
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    // Only return rows where this column value exists in the row.
    SingleColumnValueFilter filter =
      new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
        Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("trans-blob"));
    scan.addFamily(Bytes.toBytes("trans-type"));
    scan.addFamily(Bytes.toBytes("trans-date"));
    scan.addFamily(Bytes.toBytes("trans-tags"));
    scan.setFilter(allFilters);
    return r.getScanner(scan);
  }
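
One caveat worth noting: PrefixFilter only discards rows, it does not seek the scan to the prefix, so the region still walks everything before it. Setting the scan's start row to the same prefix avoids that; a minimal sketch reusing keyPrefix:

    Scan scan = new Scan();
    scan.setStartRow(Bytes.toBytes(keyPrefix)); // begin at the prefix instead of the table start
    scan.setFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));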

Examples of org.apache.hadoop.hbase.filter.FilterList

  }

  private ResultScanner buildScanner(String keyPrefix, String value, HTable ht)
      throws IOException {
    // Defaults to FilterList.Operator.MUST_PASS_ALL.
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
    // Only return rows where this column value exists in the row.
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"),
        CompareOp.EQUAL, Bytes.toBytes(value));
    filter.setFilterIfMissing(true);
    allFilters.addFilter(filter);

    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("trans-blob"));
    scan.addFamily(Bytes.toBytes("trans-type"));
    scan.addFamily(Bytes.toBytes("trans-date"));
    scan.addFamily(Bytes.toBytes("trans-tags"));
    scan.setFilter(allFilters);
    return ht.getScanner(scan);
  }

Examples of org.apache.hadoop.hbase.filter.FilterList

    @Override
    void testRow(final int i) throws IOException {
      Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
      FilterList list = new FilterList();
      if (this.filterAll) {
        list.addFilter(new FilterAllFilter());
      }
      // Stop the whole scan once PageFilter has handed back 120 rows.
      list.addFilter(new WhileMatchFilter(new PageFilter(120)));
      scan.setFilter(list);
      ResultScanner s = this.table.getScanner(scan);
      while (s.next() != null) {
        // drain the scanner; the benchmark only times the reads
      }
      s.close();
    }
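
PageFilter applies its limit independently on each region server, so a scan that spans regions can return more than the page size; WhileMatchFilter compensates by aborting the entire scan the moment the wrapped PageFilter starts rejecting. Where an exact cap matters, it is also enforced client-side; a minimal sketch, assuming a ResultScanner s obtained as above:

      int limit = 120;
      List<Result> page = new ArrayList<Result>(limit);
      Result rr;
      // Stop reading as soon as the strict client-side cap is reached.
      while (page.size() < limit && (rr = s.next()) != null) {
        page.add(rr);
      }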

Examples of org.apache.hadoop.hbase.filter.FilterList

            }

            if (maxMessagesPerPoll > 0) {
                filters.add(new PageFilter(maxMessagesPerPoll));
            }
            // FilterList(List) combines its members with MUST_PASS_ALL by default.
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);

            if (rowModel != null && rowModel.getCells() != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                for (HBaseCell cellModel : cellModels) {
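
The single-argument FilterList(List) constructor used here defaults to MUST_PASS_ALL, so a row must satisfy every configured filter to be polled. For any-of semantics the operator is passed explicitly; a minimal sketch ORing two hypothetical row-key prefixes:

        List<Filter> anyOf = new ArrayList<Filter>();
        anyOf.add(new PrefixFilter(Bytes.toBytes("error-"))); // hypothetical prefixes
        anyOf.add(new PrefixFilter(Bytes.toBytes("warn-")));
        scan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ONE, anyOf));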

Examples of org.apache.hadoop.hbase.filter.FilterList

            query.getACLStrategy() ? AccessControlFilter.Strategy.CHECK_CELL_FIRST :
              AccessControlFilter.Strategy.CHECK_TABLE_AND_CF_ONLY,
            cfVsMaxVersions);
          // wrap any existing filter
          if (filter != null) {
            ourFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
              Lists.newArrayList(ourFilter, filter));
          }
          authResult.setAllowed(true);
          authResult.setReason("Access allowed with filter");
          switch (opType) {
          case GET:
          case EXISTS:
            ((Get)query).setFilter(ourFilter);
            break;
          case SCAN:
            ((Scan)query).setFilter(ourFilter);
            break;
          default:
            throw new RuntimeException("Unhandled operation " + opType);
          }
        }
      } else {
        // New behavior: Any access we might be granted is more fine-grained
        // than whole table or CF. Simply inject a filter and return what is
        // allowed. We will not throw an AccessDeniedException. This is a
        // behavioral change since 0.96.
        Filter ourFilter = new AccessControlFilter(authManager, user, table,
          query.getACLStrategy() ? AccessControlFilter.Strategy.CHECK_CELL_FIRST :
            AccessControlFilter.Strategy.CHECK_CELL_DEFAULT,
          cfVsMaxVersions);
        // wrap any existing filter
        if (filter != null) {
          ourFilter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
            Lists.newArrayList(ourFilter, filter));
        }
        authResult.setAllowed(true);
        authResult.setReason("Access allowed with filter");
        switch (opType) {
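
The wrapping above is the standard way to layer a mandatory filter on top of whatever the caller supplied: both live in a MUST_PASS_ALL list, with the access filter first so it is consulted before the caller's filter. A minimal sketch of the same pattern outside the coprocessor, using Arrays.asList rather than Guava's Lists (the PrefixFilter constraint is a hypothetical stand-in):

      Filter existing = scan.getFilter(); // may be null
      Filter mine = new PrefixFilter(Bytes.toBytes("row-")); // hypothetical constraint
      scan.setFilter(existing == null
          ? mine
          : new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.asList(mine, existing)));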

Examples of org.apache.hadoop.hbase.filter.FilterList

    }

    @Override
    void testRow(final int i) throws IOException {
      Scan scan = new Scan(getRandomRow(this.rand, opts.totalRows));
      FilterList list = new FilterList();
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
      if (opts.filterAll) {
        list.addFilter(new FilterAllFilter());
      }
      // Stop the whole scan once PageFilter has handed back 120 rows.
      list.addFilter(new WhileMatchFilter(new PageFilter(120)));
      scan.setFilter(list);
      ResultScanner s = this.table.getScanner(scan);
      while (s.next() != null) {
        // drain the scanner; the benchmark only times the reads
      }
      s.close();
    }

Examples of org.apache.hadoop.hbase.filter.FilterList

        if (scanner != null) scanner.close();
      }
    }

    protected Scan constructScan(byte[] valuePrefix) throws IOException {
      FilterList list = new FilterList();
      Filter filter = new SingleColumnValueFilter(
          FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL,
          new BinaryComparator(valuePrefix)
      );
      list.addFilter(filter);
      if (opts.filterAll) {
        list.addFilter(new FilterAllFilter());
      }
      Scan scan = new Scan();
      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
      scan.setFilter(list);
      return scan;
    }