Package org.apache.hadoop.hbase.client

Examples of org.apache.hadoop.hbase.client.Scan
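
Before the excerpts, a minimal self-contained sketch of typical Scan usage against this
generation of the client API. The table name "t1", column family "cf" and row keys are
placeholders for illustration, not taken from the excerpts below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "t1");             // placeholder table name
    try {
      // Scan the half-open row range [row-000, row-100), restricted to one family.
      Scan scan = new Scan(Bytes.toBytes("row-000"), Bytes.toBytes("row-100"));
      scan.addFamily(Bytes.toBytes("cf"));
      scan.setCaching(100);                            // rows fetched per RPC
      ResultScanner scanner = table.getScanner(scan);
      try {
        for (Result r : scanner) {
          System.out.println(Bytes.toString(r.getRow()));
        }
      } finally {
        scanner.close();
      }
    } finally {
      table.close();
    }
  }
}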


        // Opening the compaction scanner: a region coprocessor may supply one via the
        // preCompactScannerOpen hook; otherwise fall back to a StoreScanner driven by a
        // default Scan that keeps as many versions as the column family allows.
              .preCompactScannerOpen(store, scanners,
                majorCompaction ? ScanType.MAJOR_COMPACT : ScanType.MINOR_COMPACT, earliestPutTs,
                request);
        }
        if (scanner == null) {
          Scan scan = new Scan();
          scan.setMaxVersions(store.getFamily().getMaxVersions());
          /* Include deletes, unless we are doing a major compaction */
          scanner = new StoreScanner(store, store.getScanInfo(), scan, scanners,
            majorCompaction ? ScanType.MAJOR_COMPACT : ScanType.MINOR_COMPACT,
            smallestReadPoint, earliestPutTs);
        }


    // Building the scanner used to flush the memstore snapshot: a region coprocessor
    // may supply one via the preFlushScannerOpen hook; otherwise wrap the snapshot in a
    // StoreScanner driven by a default Scan capped at the store's configured max versions.
    KeyValueScanner memstoreScanner = new CollectionBackedScanner(set, this.comparator);
    if (getHRegion().getCoprocessorHost() != null) {
      scanner = getHRegion().getCoprocessorHost().preFlushScannerOpen(this, memstoreScanner);
    }
    if (scanner == null) {
      Scan scan = new Scan();
      scan.setMaxVersions(scanInfo.getMaxVersions());
      scanner = new StoreScanner(this, scanInfo, scan,
          Collections.singletonList(memstoreScanner), ScanType.MINOR_COMPACT,
          this.region.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
    }
    if (getHRegion().getCoprocessorHost() != null) {

    // Build a Scan limited to the catalog rows of a single table.
    // Start key is just the table name with delimiters
    byte[] startKey = Bytes.toBytes(strName + ",,");
    // Stop key appends the smallest possible char (a space) to the table name
    byte[] stopKey = Bytes.toBytes(strName + " ,,");

    Scan scan = new Scan(startKey);
    scan.setStopRow(stopKey);
    return scan;
  }
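
A hypothetical usage sketch for the helper above: run the table-scoped Scan against the
.META. catalog table. The HTable construction, the conf variable and the table name
"myTable" are illustrative, not part of the original source.

    HTable meta = new HTable(conf, ".META.");
    ResultScanner results = meta.getScanner(getScanForTableName(Bytes.toBytes("myTable")));
    try {
      for (Result r : results) {
        // each Result is one catalog row, i.e. one region, of 'myTable'
      }
    } finally {
      results.close();
      meta.close();
    }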

  /**
   * Performs a full scan of the catalog region over a direct HRegionInterface
   * connection, passing every non-empty row to the given Visitor.
   */
  public static void fullScan(HRegionInterface hRegionInterface,
                              Visitor visitor, final byte[] regionName,
                              byte[] startrow) throws IOException {
    if (hRegionInterface == null) return;
    Scan scan = new Scan();
    if (startrow != null) scan.setStartRow(startrow);
    scan.addFamily(HConstants.CATALOG_FAMILY);
    long scannerid = hRegionInterface.openScanner(regionName, scan);
    try {
      Result data;
      while ((data = hRegionInterface.next(scannerid)) != null) {
        if (!data.isEmpty()) visitor.visit(data);
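
A minimal sketch of a Visitor to hand to the fullScan helpers shown here, assuming the
catalog Visitor callback returns a boolean "keep scanning" flag:

    Visitor rowPrinter = new Visitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        System.out.println(Bytes.toString(r.getRow()));
        return true; // returning false stops the scan early
      }
    };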

  /**
   * Performs a full scan of -ROOT- or .META. (depending on scanRoot), passing each
   * row to the given Visitor. For an unbounded .META. scan, scanner caching is raised
   * via HConstants.HBASE_META_SCANNER_CACHING to speed things up.
   * @throws IOException
   */
  static void fullScan(CatalogTracker catalogTracker,
    final Visitor visitor, final byte [] startrow, final boolean scanRoot)
  throws IOException {
    Scan scan = new Scan();
    if (startrow != null) scan.setStartRow(startrow);
    if (startrow == null && !scanRoot) {
      int caching = catalogTracker.getConnection().getConfiguration()
          .getInt(HConstants.HBASE_META_SCANNER_CACHING, 100);
      scan.setCaching(caching);
    }
    scan.addFamily(HConstants.CATALOG_FAMILY);
    HTable metaTable = scanRoot ?
      getRootHTable(catalogTracker) : getMetaHTable(catalogTracker);
    ResultScanner scanner = metaTable.getScanner(scan);
    try {
      Result data;

  /**
   * Converts the given Base64-encoded string back into a Scan instance
   * (the Scan having been serialized through its Writable interface).
   * @throws IOException When reading the scan instance fails.
   */
  static Scan convertStringToScan(String base64) throws IOException {
    ByteArrayInputStream bis = new ByteArrayInputStream(Base64.decode(base64));
    DataInputStream dis = new DataInputStream(bis);
    Scan scan = new Scan();
    scan.readFields(dis);
    return scan;
  }
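
The inverse direction, serializing a Scan to a Base64 string, looks roughly like the
sketch below, assuming the same Writable-based Scan serialization and the
org.apache.hadoop.hbase.util.Base64 helper used above (not necessarily the exact
upstream method).

  static String convertScanToString(Scan scan) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(out);
    scan.write(dos);   // Scan implements Writable in this API generation
    return Base64.encodeBytes(out.toByteArray());
  }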

    // Thrift gateway handler: translate a TScan request into a client-side Scan
    // (start/stop row, timestamp upper bound, caching, columns and optional filter
    // string), open the scanner and register it under a numeric scanner id.
    public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
        Map<ByteBuffer, ByteBuffer> attributes)
        throws IOError {
      try {
        HTable table = getTable(tableName);
        Scan scan = new Scan();
        addAttributes(scan, attributes);
        if (tScan.isSetStartRow()) {
          scan.setStartRow(tScan.getStartRow());
        }
        if (tScan.isSetStopRow()) {
          scan.setStopRow(tScan.getStopRow());
        }
        if (tScan.isSetTimestamp()) {
          scan.setTimeRange(Long.MIN_VALUE, tScan.getTimestamp());
        }
        if (tScan.isSetCaching()) {
          scan.setCaching(tScan.getCaching());
        }
        if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
          for(ByteBuffer column : tScan.getColumns()) {
            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
            if(famQf.length == 1) {
              scan.addFamily(famQf[0]);
            } else {
              scan.addColumn(famQf[0], famQf[1]);
            }
          }
        }
        if (tScan.isSetFilterString()) {
          ParseFilter parseFilter = new ParseFilter();
          scan.setFilter(
              parseFilter.parseFilterString(tScan.getFilterString()));
        }
        return addScanner(table.getScanner(scan));
      } catch (IOException e) {
        LOG.warn(e.getMessage(), e);

    // Thrift gateway handler: open a scanner from a bare start row plus an optional
    // list of columns given as "family" or "family:qualifier".
    public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
        List<ByteBuffer> columns,
        Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
      try {
        HTable table = getTable(tableName);
        Scan scan = new Scan(getBytes(startRow));
        addAttributes(scan, attributes);
        if(columns != null && columns.size() != 0) {
          for(ByteBuffer column : columns) {
            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
            if(famQf.length == 1) {
              scan.addFamily(famQf[0]);
            } else {
              scan.addColumn(famQf[0], famQf[1]);
            }
          }
        }
        return addScanner(table.getScanner(scan));
      } catch (IOException e) {

    // Thrift gateway handler: bounded scan between an explicit start row and stop row.
    public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
        ByteBuffer stopRow, List<ByteBuffer> columns,
        Map<ByteBuffer, ByteBuffer> attributes)
        throws IOError, TException {
      try {
        HTable table = getTable(tableName);
        Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
        addAttributes(scan, attributes);
        if(columns != null && columns.size() != 0) {
          for(ByteBuffer column : columns) {
            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
            if(famQf.length == 1) {
              scan.addFamily(famQf[0]);
            } else {
              scan.addColumn(famQf[0], famQf[1]);
            }
          }
        }
        return addScanner(table.getScanner(scan));
      } catch (IOException e) {

    // Thrift gateway handler: prefix scan. Start at the prefix and stop as soon as a
    // row no longer matches it, via WhileMatchFilter wrapping a PrefixFilter.
    public int scannerOpenWithPrefix(ByteBuffer tableName, ByteBuffer startAndPrefix,
                                     List<ByteBuffer> columns,
        Map<ByteBuffer, ByteBuffer> attributes)
        throws IOError, TException {
      try {
        HTable table = getTable(tableName);
        Scan scan = new Scan(getBytes(startAndPrefix));
        addAttributes(scan, attributes);
        Filter f = new WhileMatchFilter(
            new PrefixFilter(getBytes(startAndPrefix)));
        scan.setFilter(f);
        if (columns != null && columns.size() != 0) {
          for(ByteBuffer column : columns) {
            byte [][] famQf = KeyValue.parseColumn(getBytes(column));
            if(famQf.length == 1) {
              scan.addFamily(famQf[0]);
            } else {
              scan.addColumn(famQf[0], famQf[1]);
            }
          }
        }
        return addScanner(table.getScanner(scan));
      } catch (IOException e) {
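
For comparison, the prefix-scan pattern used by the Thrift handler above, written directly
against the Java client API. The conf variable, the table name "t1" and the prefix value
are placeholders for illustration.

    HTable table = new HTable(conf, "t1");
    byte[] prefix = Bytes.toBytes("row-");
    Scan scan = new Scan(prefix);
    scan.setFilter(new WhileMatchFilter(new PrefixFilter(prefix)));
    ResultScanner rs = table.getScanner(scan);
    try {
      for (Result r : rs) {
        // rows are returned until the first row that no longer starts with the prefix
      }
    } finally {
      rs.close();
      table.close();
    }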
