Package: org.apache.hadoop.hbase.client

Examples of org.apache.hadoop.hbase.client.HTableInterface


              for(Table t : tables.annotations()) {
                table = t;
              }
            }
            if(table!=null) {
              HTableInterface hbase = pool.getTable(table.name().getBytes())
              processor.process(new ChukwaArchiveKey(), chunk, output, reporter);
              hbase.put(output.getKeyValues());
              pool.putTable(hbase);
            }
          } catch (Exception e) {
            log.warn(output.getKeyValues());
            log.warn(ExceptionUtil.getStackTrace(e));
View Full Code Here


    seriesName.append(":");
    seriesName.append(column);

    Series series = new Series(seriesName.toString());
    try {
      HTableInterface table = pool.getTable(tableName);
      Calendar c = Calendar.getInstance();
      c.setTimeInMillis(startTime);
      c.set(Calendar.MINUTE, 0);
      c.set(Calendar.SECOND, 0);
      c.set(Calendar.MILLISECOND, 0);
      String startRow = c.getTimeInMillis()+rkey;
      Scan scan = new Scan();
      scan.addColumn(family.getBytes(), column.getBytes());
      scan.setStartRow(startRow.getBytes());
      scan.setTimeRange(startTime, endTime);
      scan.setMaxVersions();
      if(filterByRowKey) {
        RowFilter rf = new RowFilter(CompareOp.EQUAL, new
            RegexStringComparator("[0-9]+-"+rkey+"$"));
        scan.setFilter(rf);
      }
      ResultScanner results = table.getScanner(scan);
      Iterator<Result> it = results.iterator();
      // TODO: Apply discrete wavelet transformation to limit the output
      // size to 1000 data points for graphing optimization. (i.e jwave)
      while(it.hasNext()) {
        Result result = it.next();
        String temp = new String(result.getValue(family.getBytes(), column.getBytes()));
        double value = Double.parseDouble(temp);
        // TODO: Pig Store function does not honor HBase timestamp, hence need to parse rowKey for timestamp.
        String buf = new String(result.getRow());
        Long timestamp = Long.parseLong(buf.split("-")[0]);
        // If Pig Store function can honor HBase timestamp, use the following line is better.
        // series.add(result.getCellValue().getTimestamp(), value);
        series.add(timestamp, value);
      }
      results.close();
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return series;
  }
View Full Code Here

  }

  public static Set<String> getFamilyNames(String tableName) {
    Set<String> familyNames = new CopyOnWriteArraySet<String>();
    try {
      HTableInterface table = pool.getTable(tableName);
      Set<byte[]> families = table.getTableDescriptor().getFamiliesKeys();
      for(byte[] name : families) {
        familyNames.add(new String(name));
      }
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return familyNames;
   
View Full Code Here

  }
 
  public static Set<String> getColumnNames(String tableName, String family, long startTime, long endTime, boolean fullScan) {
    Set<String> columnNames = new CopyOnWriteArraySet<String>();
    try {
      HTableInterface table = pool.getTable(tableName);
      Scan scan = new Scan();
      if(!fullScan) {
        // Take sample columns of the recent time.
        StringBuilder temp = new StringBuilder();
        temp.append(endTime-300000L);
        scan.setStartRow(temp.toString().getBytes());
        temp.setLength(0);
        temp.append(endTime);
        scan.setStopRow(temp.toString().getBytes());
      } else {
        StringBuilder temp = new StringBuilder();
        temp.append(startTime);
        scan.setStartRow(temp.toString().getBytes());
        temp.setLength(0);
        temp.append(endTime);
        scan.setStopRow(temp.toString().getBytes());
      }
      scan.addFamily(family.getBytes());
      ResultScanner results = table.getScanner(scan);
      Iterator<Result> it = results.iterator();
      if(fullScan) {
        while(it.hasNext()) {
          getColumnNamesHelper(columnNames, it);
        }       
      } else {
        getColumnNamesHelper(columnNames, it);       
      }
      results.close();
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return columnNames;
  }
View Full Code Here

    return columnNames;
  }
 
  public static Set<String> getRowNames(String tableName, String family, String qualifier, long startTime, long endTime, boolean fullScan) {
    Set<String> rows = new HashSet<String>();
    HTableInterface table = pool.getTable(tableName);
    try {
      Scan scan = new Scan();
      scan.addColumn(family.getBytes(), qualifier.getBytes());
      if(!fullScan) {
        // Take sample columns of the recent time.
        StringBuilder temp = new StringBuilder();
        temp.append(endTime-300000L);
        scan.setStartRow(temp.toString().getBytes());
        temp.setLength(0);
        temp.append(endTime);
        scan.setStopRow(temp.toString().getBytes());
      } else {
        StringBuilder temp = new StringBuilder();
        temp.append(startTime);
        scan.setStartRow(temp.toString().getBytes());
        temp.setLength(0);
        temp.append(endTime);
        scan.setStopRow(temp.toString().getBytes());
      }
      ResultScanner results = table.getScanner(scan);
      Iterator<Result> it = results.iterator();
      while(it.hasNext()) {
        Result result = it.next();
        String buffer = new String(result.getRow());
        String[] parts = buffer.split("-", 2);
        if(!rows.contains(parts[1])) {
          rows.add(parts[1]);
        }   
      }
      results.close();
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return rows;   
  }
View Full Code Here

  public static Set<String> getClusterNames(long startTime, long endTime) {
    String tableName = "SystemMetrics";
    String family = "system";
    String column = "ctags";
    Set<String> clusters = new HashSet<String>();
    HTableInterface table = pool.getTable(tableName);
    Pattern p = Pattern.compile("\\s*cluster=\"(.*?)\"");
    try {
      Scan scan = new Scan();
      scan.addColumn(family.getBytes(), column.getBytes());
      scan.setTimeRange(startTime, endTime);
      ResultScanner results = table.getScanner(scan);
      Iterator<Result> it = results.iterator();
      while(it.hasNext()) {
        Result result = it.next();
        String buffer = new String(result.getValue(family.getBytes(), column.getBytes()));
        Matcher m = p.matcher(buffer);
        if(m.matches()) {
          clusters.add(m.group(1));
        }
      }
      results.close();
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return clusters;
  }
View Full Code Here

   */
  protected void batch(byte[] tableName, Collection<List<Row>> allRows) throws IOException {
    if (allRows.isEmpty()) {
      return;
    }
    HTableInterface table = null;
    try {
      table = this.sharedHtableCon.getTable(tableName);
      for (List<Row> rows : allRows) {
        table.batch(rows);
        this.metrics.appliedOpsRate.inc(rows.size());
      }
    } catch (InterruptedException ix) {
      throw new IOException(ix);
    } finally {
      if (table != null) {
        table.close();
      }
    }
  }
View Full Code Here

        this.rowModel = endpoint.getRowModel();
    }

    @Override
    protected int poll() throws Exception {
        HTableInterface table = tablePool.getTable(tableName);
        try {
            shutdownRunningTask = null;
            pendingExchanges = 0;

            Queue<Exchange> queue = new LinkedList<Exchange>();

            Scan scan = new Scan();
            List<Filter> filters = new LinkedList<Filter>();
            if (endpoint.getFilters() != null) {
                filters.addAll(endpoint.getFilters());
            }

            if (maxMessagesPerPoll > 0) {
                filters.add(new PageFilter(maxMessagesPerPoll));
            }
            Filter compoundFilter = new FilterList(filters);
            scan.setFilter(compoundFilter);

            if (rowModel != null && rowModel.getCells() != null) {
                Set<HBaseCell> cellModels = rowModel.getCells();
                for (HBaseCell cellModel : cellModels) {
                    scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(cellModel.getFamily()), HBaseHelper.getHBaseFieldAsBytes(cellModel.getQualifier()));
                }
            }

            ResultScanner scanner = table.getScanner(scan);
            int exchangeCount = 0;
            // The next three statements are used just to get a reference to the BodyCellMappingStrategy instance.
            Exchange exchange = endpoint.createExchange();
            exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
            CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
            for (Result result = scanner.next(); (exchangeCount < maxMessagesPerPoll || maxMessagesPerPoll <= 0) && result != null; result = scanner.next()) {
                HBaseData data = new HBaseData();
                HBaseRow resultRow = new HBaseRow();
                resultRow.apply(rowModel);
                byte[] row = result.getRow();
                resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(rowModel.getRowType(), row));

                List<KeyValue> keyValues = result.list();
                if (keyValues != null) {
                    Set<HBaseCell> cellModels = rowModel.getCells();
                    if (cellModels.size() > 0) {
                        for (HBaseCell modelCell : cellModels) {
                            HBaseCell resultCell = new HBaseCell();
                            String family = modelCell.getFamily();
                            String column = modelCell.getQualifier();
                            resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(modelCell.getValueType(),
                                    result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column))));
                            resultCell.setFamily(modelCell.getFamily());
                            resultCell.setQualifier(modelCell.getQualifier());
                            resultRow.getCells().add(resultCell);
                        }
                    } else {
                        // just need to put every key value into the result Cells
                        for (KeyValue keyValue : keyValues) {
                            String qualifier = new String(keyValue.getQualifier());
                            String family = new String(keyValue.getFamily());
                            HBaseCell resultCell = new HBaseCell();
                            resultCell.setFamily(family);
                            resultCell.setQualifier(qualifier);
                            resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(String.class, keyValue.getValue()));
                            resultRow.getCells().add(resultCell);
                        }
                    }
              
                    data.getRows().add(resultRow);
                    exchange = endpoint.createExchange();
                    // Probably overkill but kept it here for consistency.
                    exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, CellMappingStrategyFactory.BODY);
                    mappingStrategy.applyScanResults(exchange.getIn(), data);
                    //Make sure that there is a header containing the marked row ids, so that they can be deleted.
                    exchange.getIn().setHeader(HbaseAttribute.HBASE_MARKED_ROW_ID.asHeader(), result.getRow());
                    queue.add(exchange);
                    exchangeCount++;
                }
            }
            scanner.close();
            return queue.isEmpty() ? 0 : processBatch(CastUtils.cast(queue));
        } finally {
            table.close();
        }
    }
View Full Code Here

    /**
     * Delegates to the {@link HBaseRemoveHandler}.
     */
    private void remove(byte[] row) throws IOException {
        HTableInterface table = tablePool.getTable(tableName);
        try {
            endpoint.getRemoveHandler().remove(table, row);
        } finally {
            table.close();
        }
    }
View Full Code Here

        HConstants.DEFAULT_HBASE_META_SCANNER_CACHING);
    scan.setCaching(rows);
    FilterList allFilters = new FilterList();
    allFilters.addFilter(new PrefixFilter(FConstants.TABLEROW_PREFIX));
    scan.setFilter(allFilters);
    HTableInterface htable = null;
    try {
      htable = getHTable();
      ResultScanner scanner = htable.getScanner(scan);
      for (Result r = scanner.next(); r != null; r = scanner.next()) {
        byte[] value = r.getValue(FConstants.CATALOG_FAMILY,
            FConstants.TABLEINFO);
        FTable ftable = FTable.convert(value);
        if (ftable == null) {
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hbase.client.HTableInterface

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.