Examples of HStoreKey


Examples of org.apache.hadoop.hbase.HStoreKey

      // Write columns named 1, 2, 3, etc. and then values of single byte
      // 1, 2, 3...
      long timestamp = System.currentTimeMillis();
      TreeMap<HStoreKey, byte []> cols = new TreeMap<HStoreKey, byte []>();
      for (int i = 0; i < COL_COUNT; i++) {
        cols.put(new HStoreKey(row, Bytes.toBytes(Integer.toString(i)), timestamp),
            new byte[] { (byte)(i + '0') });
      }
      log.append(regionName, tableName, cols, false);
      long logSeqId = log.startCacheFlush();
      log.completeCacheFlush(regionName, tableName, logSeqId);
View Full Code Here
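
The snippet above keys a TreeMap by HStoreKey before handing the batch to the write-ahead log. As a quick orientation, here is a minimal, self-contained sketch of the same pattern; the row and column names are hypothetical, it assumes the pre-0.20 HStoreKey(row, column, timestamp) constructor used above, and the comment about iteration order describes the expected behaviour rather than anything shown on this page.

      import java.util.Map;
      import java.util.TreeMap;

      import org.apache.hadoop.hbase.HStoreKey;
      import org.apache.hadoop.hbase.util.Bytes;

      public class HStoreKeyTreeMapSketch {
        public static void main(String[] args) {
          long now = System.currentTimeMillis();
          byte [] row = Bytes.toBytes("row_0000");   // hypothetical row key

          // HStoreKey is a WritableComparable, so it can key a TreeMap directly.
          TreeMap<HStoreKey, byte []> cells = new TreeMap<HStoreKey, byte []>();

          // Two columns at the current timestamp, plus an older version of the first.
          cells.put(new HStoreKey(row, Bytes.toBytes("colfam1:a"), now), Bytes.toBytes("v-new"));
          cells.put(new HStoreKey(row, Bytes.toBytes("colfam1:a"), now - 1000), Bytes.toBytes("v-old"));
          cells.put(new HStoreKey(row, Bytes.toBytes("colfam1:b"), now), Bytes.toBytes("v-b"));

          // Iteration follows HStoreKey's comparator: entries group by row, then column,
          // and versions of the same cell are expected to come back newest first.
          for (Map.Entry<HStoreKey, byte []> e : cells.entrySet()) {
            HStoreKey k = e.getKey();
            System.out.println(Bytes.toString(k.getRow()) + "/" + Bytes.toString(k.getColumn())
                + "/" + k.getTimestamp() + " = " + Bytes.toString(e.getValue()));
          }
        }
      }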

Examples of org.apache.hadoop.hbase.HStoreKey

    InternalScanner s =
      r.getScanner(cols, HConstants.EMPTY_START_ROW, System.currentTimeMillis(), null);
    int numFetched = 0;
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          int curval =
            Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
          for(int j = 0; j < cols.length; j++) {
            if (Bytes.compareTo(col, cols[j]) == 0) {
              assertEquals("Error at:" + curKey.getRow() + "/"
                  + curKey.getTimestamp()
                  + ", Value for " + col + " should be: " + k
                  + ", but was fetched as: " + curval, k, curval);
              numFetched++;
            }
          }
        }
        curVals.clear();
        k++;
      }
    } finally {
      s.close();
    }
    assertEquals("Inserted " + numInserted + " values, but fetched " + numFetched, numInserted, numFetched);

    LOG.info("Scanned " + (vals1.length / 2)
        + " rows from cache. Elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));

    // 3.  Flush to disk
   
    startTime = System.currentTimeMillis();
   
    region.flushcache();

    LOG.info("Cache flush elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));

    // 4.  Scan from disk
   
    startTime = System.currentTimeMillis();
   
    s = r.getScanner(cols, HConstants.EMPTY_START_ROW,
      System.currentTimeMillis(), null);
    numFetched = 0;
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          int curval =
            Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
          for(int j = 0; j < cols.length; j++) {
            if (Bytes.compareTo(col, cols[j]) == 0) {
              assertEquals("Error at:" + curKey.getRow() + "/"
                  + curKey.getTimestamp()
                  + ", Value for " + col + " should be: " + k
                  + ", but was fetched as: " + curval, k, curval);
              numFetched++;
            }
          }
        }
        curVals.clear();
        k++;
      }
    } finally {
      s.close();
    }
    assertEquals("Inserted " + numInserted + " values, but fetched " + numFetched, numInserted, numFetched);

    LOG.info("Scanned " + (vals1.length / 2)
        + " rows from disk. Elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));

    // 5.  Insert more values
   
    startTime = System.currentTimeMillis();

    for(int k = vals1.length/2; k < vals1.length; k++) {
      String kLabel = String.format("%1$03d", k);
     
      BatchUpdate batchUpdate =
        new BatchUpdate(Bytes.toBytes("row_vals1_" + kLabel),
          System.currentTimeMillis());
      batchUpdate.put(cols[0], vals1[k].getBytes(HConstants.UTF8_ENCODING));
      batchUpdate.put(cols[1], vals1[k].getBytes(HConstants.UTF8_ENCODING));
      region.commit(batchUpdate);
      numInserted += 2;
    }

    LOG.info("Write " + (vals1.length / 2) + " rows. Elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));

    // 6.  Scan from cache and disk
   
    startTime = System.currentTimeMillis();

    s = r.getScanner(cols, HConstants.EMPTY_START_ROW,
        System.currentTimeMillis(), null);
    numFetched = 0;
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          int curval =
            Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
          for(int j = 0; j < cols.length; j++) {
            if(Bytes.compareTo(col, cols[j]) == 0) {
              assertEquals("Error at:" + curKey.getRow() + "/"
                  + curKey.getTimestamp()
                  + ", Value for " + col + " should be: " + k
                  + ", but was fetched as: " + curval, k, curval);
              numFetched++;
            }
          }
        }
        curVals.clear();
        k++;
      }
    } finally {
      s.close();
    }
    assertEquals("Inserted " + numInserted + " values, but fetched " + numFetched, numInserted, numFetched);

    LOG.info("Scanned " + vals1.length
        + " rows from cache and disk. Elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));
   
    // 7.  Flush to disk
   
    startTime = System.currentTimeMillis();
   
    region.flushcache();

    LOG.info("Cache flush elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));
   
    // 8.  Scan from disk
   
    startTime = System.currentTimeMillis();
   
    s = r.getScanner(cols, HConstants.EMPTY_START_ROW, System.currentTimeMillis(), null);
    numFetched = 0;
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          int curval =
            Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
          for (int j = 0; j < cols.length; j++) {
            if (Bytes.compareTo(col, cols[j]) == 0) {
              assertEquals("Value for " + col + " should be: " + k
                  + ", but was fetched as: " + curval, curval, k);
              numFetched++;
            }
          }
        }
        curVals.clear();
        k++;
      }
    } finally {
      s.close();
    }
    assertEquals("Inserted " + numInserted + " values, but fetched " + numFetched, numInserted, numFetched);
   
    LOG.info("Scanned " + vals1.length
        + " rows from disk. Elapsed time: "
        + ((System.currentTimeMillis() - startTime) / 1000.0));

    // 9. Scan with a starting point

    startTime = System.currentTimeMillis();
   
    s = r.getScanner(cols, Bytes.toBytes("row_vals1_500"),
        System.currentTimeMillis(), null);
   
    numFetched = 0;
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 500;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
View Full Code Here
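
Every scan in the test above repeats the same read loop: allocate a reusable HStoreKey and a column-to-Cell TreeMap, call next(key, values) until it returns false, and clear the map between rows. Condensed into one hypothetical helper (countCells is not part of HBase; it assumes an HRegion r and the usual imports from these snippets are in scope):

    // Hedged sketch of the scanner idiom used throughout the tests above:
    // counts every cell returned for the given columns.
    private int countCells(final HRegion r, final byte [][] cols) throws IOException {
      InternalScanner s = r.getScanner(cols, HConstants.EMPTY_START_ROW,
          System.currentTimeMillis(), null);
      int fetched = 0;
      try {
        HStoreKey curKey = new HStoreKey();                // reused for every row
        TreeMap<byte [], Cell> curVals =
          new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
        while (s.next(curKey, curVals)) {                  // false once the scan is exhausted
          for (Cell cell : curVals.values()) {
            if (cell.getValue() != null) {
              fetched++;
            }
          }
          curVals.clear();                                 // next() fills the map per row, so reset it
        }
      } finally {
        s.close();                                         // always release the scanner
      }
      return fetched;
    }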

Examples of org.apache.hadoop.hbase.HStoreKey

    try {

      int contentsFetched = 0;
      int anchorFetched = 0;
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          String curval = Bytes.toString(val);
          if(Bytes.compareTo(col, CONTENTS_BASIC) == 0) {
            assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                + ", Value for " + col + " should start with: " + CONTENTSTR
                + ", but was fetched as: " + curval,
                curval.startsWith(CONTENTSTR));
            contentsFetched++;
           
          } else if (Bytes.toString(col).startsWith(ANCHORNUM)) {
            assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
                + ", Value for " + Bytes.toString(col) +
                " should start with: " + ANCHORSTR
                + ", but was fetched as: " + curval,
                curval.startsWith(ANCHORSTR));
            anchorFetched++;
           
          } else {
            LOG.info("UNEXPECTED COLUMN " + col);
          }
        }
        curVals.clear();
        k++;
      }
      assertEquals("Expected " + NUM_VALS + " " + CONTENTS_BASIC + " values, but fetched " + contentsFetched, NUM_VALS, contentsFetched);
      assertEquals("Expected " + NUM_VALS + " " + ANCHORNUM + " values, but fetched " + anchorFetched, NUM_VALS, anchorFetched);

      LOG.info("Scanned " + NUM_VALS
          + " rows from disk. Elapsed time: "
          + ((System.currentTimeMillis() - startTime) / 1000.0));
     
    } finally {
      s.close();
    }
   
    // Verify testScan data
   
    cols = new byte [][] {CONTENTS_FIRSTCOL, ANCHOR_SECONDCOL};
   
    startTime = System.currentTimeMillis();

    s = r.getScanner(cols, HConstants.EMPTY_START_ROW,
      System.currentTimeMillis(), null);
    try {
      int numFetched = 0;
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      int k = 0;
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          byte [] col = it.next();
          byte [] val = curVals.get(col).getValue();
          int curval =
            Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());

          for (int j = 0; j < cols.length; j++) {
            if (Bytes.compareTo(col, cols[j]) == 0) {
              assertEquals("Value for " + col + " should be: " + k
                  + ", but was fetched as: " + curval, curval, k);
              numFetched++;
            }
          }
        }
        curVals.clear();
        k++;
      }
      assertEquals("Inserted " + numInserted + " values, but fetched " + numFetched, numInserted, numFetched);

      LOG.info("Scanned " + (numFetched / 2)
          + " rows from disk. Elapsed time: "
          + ((System.currentTimeMillis() - startTime) / 1000.0));
     
    } finally {
      s.close();
    }
   
    // Test a scanner which only specifies the column family name
   
    cols = new byte [][] {
        Bytes.toBytes("anchor:")
    };
   
    startTime = System.currentTimeMillis();
   
    s = r.getScanner(cols, HConstants.EMPTY_START_ROW, System.currentTimeMillis(), null);

    try {
      int fetched = 0;
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      while(s.next(curKey, curVals)) {
        for(Iterator<byte []> it = curVals.keySet().iterator(); it.hasNext(); ) {
          it.next();
View Full Code Here

Examples of org.apache.hadoop.hbase.HStoreKey

    InternalScanner rootScanner = rootRegion.getScanner(
        HConstants.COL_REGIONINFO_ARRAY, HConstants.EMPTY_START_ROW,
        HConstants.LATEST_TIMESTAMP, null);

    try {
      HStoreKey key = new HStoreKey();
      SortedMap<byte [], Cell> results =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      while (rootScanner.next(key, results)) {
        HRegionInfo info = Writables.getHRegionInfoOrNull(
            results.get(HConstants.COL_REGIONINFO).getValue());
        if (info == null) {
          LOG.warn("region info is null for row " + key.getRow() +
              " in table " + HConstants.ROOT_TABLE_NAME);
          continue;
        }
        if (!listener.processRow(info)) {
          break;
View Full Code Here

Examples of org.apache.hadoop.hbase.HStoreKey

  public void scanMetaRegion(final HRegion m, final ScannerListener listener)
  throws IOException {
    InternalScanner metaScanner = m.getScanner(HConstants.COL_REGIONINFO_ARRAY,
      HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP, null);
    try {
      HStoreKey key = new HStoreKey();
      SortedMap<byte[], Cell> results =
        new TreeMap<byte[], Cell>(Bytes.BYTES_COMPARATOR);
      while (metaScanner.next(key, results)) {
        HRegionInfo info = Writables.getHRegionInfoOrNull(
            results.get(HConstants.COL_REGIONINFO).getValue());
        if (info == null) {
          LOG.warn("regioninfo null for row " + key.getRow() + " in table " +
            Bytes.toString(m.getTableDesc().getName()));
          continue;
        }
        if (!listener.processRow(info)) {
          break;
View Full Code Here
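
Both scans above hand each row's HRegionInfo to a callback and stop as soon as the listener returns false. A minimal, hypothetical listener, assuming ScannerListener declares the single method boolean processRow(HRegionInfo) that the calls above imply, could simply count and log what it sees:

    // Hypothetical listener for scanRootRegion/scanMetaRegion above.
    final int [] regionCount = {0};
    ScannerListener listener = new ScannerListener() {
      public boolean processRow(HRegionInfo info) {
        regionCount[0]++;
        LOG.info("Saw region: " + info);   // HRegionInfo supplies a readable toString()
        return true;                       // returning false would end the scan early
      }
    };
    // Usage (hypothetical): scanMetaRegion(someMetaRegion, listener);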

Examples of org.apache.hadoop.hbase.HStoreKey

    byte [] result = null;
    // Synchronize on the map so taking the tailMap below is safe.
    synchronized (map) {
      // Make an HSK with maximum timestamp so we get past most of the current
      // rows cell entries.
      HStoreKey hsk = new HStoreKey(row, HConstants.LATEST_TIMESTAMP, this.regionInfo);
      SortedMap<HStoreKey, byte []> tailMap = map.tailMap(hsk);
      // Iterate until we fall into the next row; i.e. move off current row
      for (Map.Entry<HStoreKey, byte []> es: tailMap.entrySet()) {
        HStoreKey itKey = es.getKey();
        if (HStoreKey.compareTwoRowKeys(regionInfo, itKey.getRow(), row) <= 0)
          continue;
        // Note: Not suppressing deletes or expired cells.
        result = itKey.getRow();
        break;
      }
    }
    return result;
  }
View Full Code Here

Examples of org.apache.hadoop.hbase.HStoreKey

    }
    List<HStoreKey> victims = new ArrayList<HStoreKey>();
    SortedMap<HStoreKey, byte[]> tailMap = map.tailMap(key);
    long now = System.currentTimeMillis();
    for (Map.Entry<HStoreKey, byte []> es: tailMap.entrySet()) {
      HStoreKey itKey = es.getKey();
      byte [] itCol = itKey.getColumn();
      Cell cell = results.get(itCol);
      if ((cell == null || cell.getNumValues() < numVersions) && key.matchesWithoutColumn(itKey)) {
        if (columns == null || columns.contains(itKey.getColumn())) {
          byte [] val = tailMap.get(itKey);
          if (HLogEdit.isDeleted(val)) {
            if (!deletes.containsKey(itCol)
              || deletes.get(itCol).longValue() < itKey.getTimestamp()) {
              deletes.put(itCol, Long.valueOf(itKey.getTimestamp()));
            }
          } else if (!(deletes.containsKey(itCol)
              && deletes.get(itCol).longValue() >= itKey.getTimestamp())) {
            // Skip expired cells
            if (ttl == HConstants.FOREVER ||
                  now < itKey.getTimestamp() + ttl) {
              if (cell == null) {
                results.put(itCol, new Cell(val, itKey.getTimestamp()));
              } else {
                cell.add(val, itKey.getTimestamp());
              }
            } else {
              addVictim(victims, itKey);
            }
          }
        }
      } else if (HStoreKey.compareTwoRowKeys(regionInfo, key.getRow(),
          itKey.getRow()) < 0) {
        break;
      }
    }
    // Remove expired victims from the map.
    for (HStoreKey v: victims) {
View Full Code Here
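
The expiry test buried in the loop above is worth calling out: a cell stays live while now < timestamp + ttl, and a ttl of HConstants.FOREVER disables expiry entirely. Pulled out into a hypothetical helper for clarity:

  // Hypothetical helper mirroring the expiry check in the snippet above.
  private static boolean isLive(final long ttl, final long timestamp, final long now) {
    return ttl == HConstants.FOREVER || now < timestamp + ttl;
  }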

Examples of org.apache.hadoop.hbase.HStoreKey

  private void scan(boolean validateStartcode, String serverName)
  throws IOException {
    InternalScanner scanner = null;
    TreeMap<byte [], Cell> results =
      new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
    HStoreKey key = new HStoreKey();

    byte [][][] scanColumns = {
        COLS,
        EXPLICIT_COLS
    };
View Full Code Here

Examples of org.apache.hadoop.hbase.HStoreKey

  private int count(final HRegionIncommon hri, final int flushIndex)
  throws IOException {
    LOG.info("Taking out counting scan");
    ScannerIncommon s = hri.getScanner(EXPLICIT_COLS,
        HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP);
    HStoreKey key = new HStoreKey();
    SortedMap<byte [], Cell> values =
      new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
    if (flushIndex == -1) {
      hri.flushcache();
    }
View Full Code Here

Examples of org.apache.hadoop.hbase.HStoreKey

  throws IOException {
    byte [][] cols = {column};
    InternalScanner s = r.getScanner(cols,
      HConstants.EMPTY_START_ROW, System.currentTimeMillis(), null);
    try {
      HStoreKey curKey = new HStoreKey();
      TreeMap<byte [], Cell> curVals =
        new TreeMap<byte [], Cell>(Bytes.BYTES_COMPARATOR);
      boolean first = true;
      OUTER_LOOP: while(s.next(curKey, curVals)) {
        for(byte [] col: curVals.keySet()) {
View Full Code Here