Package org.apache.hadoop.hbase

Examples of org.apache.hadoop.hbase.HTable
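The snippets below appear to come from the HBase Thrift gateway. They all follow the same pattern: resolve an HTable for the requested table name, convert the raw byte[] arguments into Text keys with getText(), call the client API, and wrap any IOException in a Thrift IOError. For reference, a minimal standalone write/read against this same 0.1-era HTable API might look like the sketch below; the HBaseConfiguration and HTable constructors plus the table and column names are assumptions for illustration, while the startUpdate/put/commit and get calls mirror the snippets on this page.

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTable;
    import org.apache.hadoop.io.Text;

    public class HTableExample {
      public static void main(String[] args) throws IOException {
        // Assumed constructors for the 0.1-era client API; names are hypothetical.
        HBaseConfiguration conf = new HBaseConfiguration();
        HTable table = new HTable(conf, new Text("test_table"));

        // Write one cell: startUpdate -> put -> commit, as in the put() snippet below.
        long lockid = table.startUpdate(new Text("row1"));
        table.put(lockid, new Text("info:greeting"), "hello".getBytes());
        table.commit(lockid);

        // Read the cell back, as in the get() snippet below.
        byte[] value = table.get(new Text("row1"), new Text("info:greeting"));
        System.out.println(new String(value));
      }
    }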


   
    public ArrayList<RegionDescriptor> getTableRegions(byte[] tableName)
        throws IOError {
      LOG.debug("getTableRegions: " + new String(tableName));
      try {
        HTable table = getTable(tableName);
        Text[] startKeys = table.getStartKeys();
        ArrayList<RegionDescriptor> regions = new ArrayList<RegionDescriptor>();
        for (int i = 0; i < startKeys.length; i++) {
          RegionDescriptor region = new RegionDescriptor();
          region.startKey = startKeys[i].toString().getBytes();
          regions.add(region);
        }
        return regions;
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }


      if (LOG.isDebugEnabled()) {
        LOG.debug("get: table=" + new String(tableName) + ", row="
            + new String(row) + ", col=" + new String(column));
      }
      try {
        HTable table = getTable(tableName);
        byte[] value = table.get(getText(row), getText(column));
        if (value == null) {
          throw new NotFound();
        }
        return value;
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

        LOG.debug("getVer: table=" + new String(tableName) + ", row="
            + new String(row) + ", col=" + new String(column) + ", numVers="
            + numVersions);
      }
      try {
        HTable table = getTable(tableName);
        byte[][] values = table.get(getText(row), getText(column), numVersions);
        if (values == null) {
          throw new NotFound();
        }
        return new ArrayList<byte[]>(Arrays.asList(values));
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

        LOG.debug("getVerTs: table=" + new String(tableName) + ", row="
            + new String(row) + ", col=" + new String(column) + ", ts="
            + timestamp + ", numVers=" + numVersions);
      }
      try {
        HTable table = getTable(tableName);
        byte[][] values = table.get(getText(row), getText(column), timestamp,
            numVersions);
        if (values == null) {
          throw new NotFound();
        }
        return new ArrayList<byte[]>(Arrays.asList(values));
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

      if (LOG.isDebugEnabled()) {
        LOG.debug("getRowTs: table=" + new String(tableName) + ", row="
            + new String(row) + ", ts=" + timestamp);
      }
      try {
        HTable table = getTable(tableName);
        SortedMap<Text, byte[]> values = table.getRow(getText(row), timestamp);
        // copy the map from type <Text, byte[]> to <byte[], byte[]>
        HashMap<byte[], byte[]> returnValues = new HashMap<byte[], byte[]>();
        for (Entry<Text, byte[]> e : values.entrySet()) {
          returnValues.put(e.getKey().getBytes(), e.getValue());
        }
        return returnValues;
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

        LOG.debug("put: table=" + new String(tableName) + ", row="
            + new String(row) + ", col=" + new String(column)
            + ", value.length=" + value.length);
      }
      try {
        HTable table = getTable(tableName);
        long lockid = table.startUpdate(getText(row));
        table.put(lockid, getText(column), value);
        table.commit(lockid);
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }
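The put() snippet above commits inside the try block and, if the write fails, never aborts the update started by startUpdate(). A slightly more defensive version of the same cycle, following the abort-on-failure pattern the mutateRowTs snippet below uses, might look like this sketch (table, row, column and value stand in for the variables of the snippet above):

        long lockid = table.startUpdate(getText(row));
        try {
          table.put(lockid, getText(column), value);   // stage the new cell value
          table.commit(lockid);                        // make the update visible
        } catch (IOException e) {
          table.abort(lockid);                         // discard the staged update
          throw new IOError(e.getMessage());
        }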

        LOG.debug("deleteAllTs: table=" + new String(tableName) + ", row="
            + new String(row) + ", col=" + new String(column) + ", ts="
            + timestamp);
      }
      try {
        HTable table = getTable(tableName);
        table.deleteAll(getText(row), getText(column), timestamp);
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

      if (LOG.isDebugEnabled()) {
        LOG.debug("deleteAllRowTs: table=" + new String(tableName) + ", row="
            + new String(row) + ", ts=" + timestamp);
      }
      try {
        HTable table = getTable(tableName);
        table.deleteAll(getText(row), timestamp);
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }

      Long lockid = null;
      HTable table = null;
     
      try {
        table = getTable(tableName);
        lockid = table.startUpdate(getText(row));
        for (Mutation m : mutations) {
          if (m.isDelete) {
            table.delete(lockid, getText(m.column));
          } else {
            table.put(lockid, getText(m.column), m.value);
          }
        }
        table.commit(lockid, timestamp);
      } catch (IOException e) {
        if (lockid != null) {
          table.abort(lockid);
        }
        throw new IOError(e.getMessage());
      }
    }

      if (LOG.isDebugEnabled()) {
        LOG.debug("scannerOpen: table=" + getText(tableName) + ", start="
            + getText(startRow) + ", columns=" + columns.toString());
      }
      try {
        HTable table = getTable(tableName);
        Text[] columnsText = new Text[columns.size()];
        for (int i = 0; i < columns.size(); ++i) {
          columnsText[i] = getText(columns.get(i));
        }
        HScannerInterface scanner = table.obtainScanner(columnsText,
            getText(startRow));
        return addScanner(scanner);
      } catch (IOException e) {
        throw new IOError(e.getMessage());
      }
    }
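scannerOpen() registers the HScannerInterface with addScanner() and hands an id back to the Thrift client. On the Java side, a scanner obtained from obtainScanner() is normally drained row by row and then closed; a minimal sketch, assuming the 0.1-era next(HStoreKey, SortedMap<Text, byte[]>) signature of HScannerInterface (HStoreKey from org.apache.hadoop.hbase, TreeMap from java.util):

        HStoreKey key = new HStoreKey();
        SortedMap<Text, byte[]> rowResult = new TreeMap<Text, byte[]>();
        try {
          while (scanner.next(key, rowResult)) {       // fills key and rowResult for the next row
            System.out.println(key.getRow() + ": " + rowResult.size() + " cells");
            rowResult.clear();                         // reuse the map for the next call
          }
        } finally {
          scanner.close();                             // always release the scanner
        }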
