Package org.apache.hadoop.hbase.client

Examples of org.apache.hadoop.hbase.client.Table
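Table is the client-side handle for reading from and writing to a single HBase table. Before the per-project excerpts below, here is a minimal, self-contained sketch of typical Table usage through the connection-based client API; the table name "my_table", family "my_cf", and column "my_col" are assumptions for illustration, and the cluster configuration is taken from the classpath. Some of the excerpts that follow still use the older new HTable(conf, tableName) constructor, which implements the same Table interface.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class TableUsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Connection is heavyweight and thread-safe; Table instances are lightweight and not thread-safe.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("my_table"))) {
      // write a single cell
      Put put = new Put(Bytes.toBytes("row1"));
      put.addColumn(Bytes.toBytes("my_cf"), Bytes.toBytes("my_col"), Bytes.toBytes("value1"));
      table.put(put);

      // read it back
      Get get = new Get(Bytes.toBytes("row1"));
      Result result = table.get(get);
      System.out.println(Bytes.toString(result.getValue(Bytes.toBytes("my_cf"), Bytes.toBytes("my_col"))));
    }
  }
}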


    } catch (Exception ex) {
      LOG.info("Caught expected exception: " + ex);
    }

    //sanity check try to write and read from table
    Table table = new HTable(TEST_UTIL.getConfiguration(), desc.getTableName());
    Put p = new Put(Bytes.toBytes("row1"));
    p.add(Bytes.toBytes("my_cf"), Bytes.toBytes("my_col"), Bytes.toBytes("value1"));
    table.put(p);
    //flush and read from disk to make sure directory changes are working
    admin.flush(desc.getTableName());
    Get g = new Get(Bytes.toBytes("row1"));
    assertTrue(table.exists(g));

    //normal case of removing namespace
    TEST_UTIL.deleteTable(desc.getTableName());
    admin.deleteNamespace(nsName);
  }
View Full Code Here


  public synchronized void create(NamespaceDescriptor ns) throws IOException {
    create(getNamespaceTable(), ns);
  }

  public synchronized void update(NamespaceDescriptor ns) throws IOException {
    Table table = getNamespaceTable();
    if (get(table, ns.getName()) == null) {
      throw new NamespaceNotFoundException(ns.getName());
    }
    upsert(table, ns);
  }
View Full Code Here
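
The create/update methods above sit behind the master's namespace bookkeeping; from the client side the same operations are normally driven through Admin. A minimal sketch, assuming a reachable cluster and a namespace name "my_ns" chosen for illustration:

try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
     Admin admin = connection.getAdmin()) {
  // create the namespace
  admin.createNamespace(NamespaceDescriptor.create("my_ns").build());

  // later, change a namespace-level configuration value and push the update
  NamespaceDescriptor updated = NamespaceDescriptor.create("my_ns")
      .addConfiguration("hbase.namespace.quota.maxtables", "10")
      .build();
  admin.modifyNamespace(updated);
}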

    Stopwatch tableOpenTimer = new Stopwatch();
    Stopwatch scanOpenTimer = new Stopwatch();
    Stopwatch scanTimer = new Stopwatch();

    tableOpenTimer.start();
    Table table = new HTable(getConf(), TableName.valueOf(tablename));
    tableOpenTimer.stop();

    Scan scan = getScan();
    scanOpenTimer.start();
    ResultScanner scanner = table.getScanner(scan);
    scanOpenTimer.stop();

    long numRows = 0;
    long numCells = 0;
    scanTimer.start();
    while (true) {
      Result result = scanner.next();
      if (result == null) {
        break;
      }
      numRows++;

      numCells += result.rawCells().length;
    }
    scanTimer.stop();
    scanner.close();
    table.close();

    ScanMetrics metrics = ProtobufUtil.toScanMetrics(scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA));
    long totalBytes = metrics.countOfBytesInResults.get();
    double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS);
    double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS);
View Full Code Here
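
Note that SCAN_ATTRIBUTES_METRICS_DATA is only populated when metrics collection was switched on before the scanner was opened; otherwise getAttribute() returns null. A minimal sketch of enabling it with the attribute-based API this excerpt relies on (newer clients can call scan.setScanMetricsEnabled(true) instead):

Scan scan = new Scan();
// ask the region servers to collect per-scan metrics; without this the
// SCAN_ATTRIBUTES_METRICS_DATA attribute stays null after the scan completes
scan.setAttribute(Scan.SCAN_ATTRIBUTES_METRICS_ENABLE, Bytes.toBytes(Boolean.TRUE));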

   * @param tableName name of the table to create
   * @return the created table, pre-loaded with rows "aaa" and "bbb"
   * @throws IOException if table creation or the puts fail
   */
  public static Table createTable(byte[] tableName) throws IOException {
    Table table = UTIL.createTable(tableName, FAMILY);
    Put p = new Put("aaa".getBytes());
    p.add(FAMILY, null, "value aaa".getBytes());
    table.put(p);
    p = new Put("bbb".getBytes());
    p.add(FAMILY, null, "value bbb".getBytes());
    table.put(p);
    return table;
  }
View Full Code Here

        // otherwise return the real scanner.
        return (ResultScanner) invocation.callRealMethod();
      }
    };

    Table htable = spy(createTable(name));
    doAnswer(a).when(htable).getScanner((Scan) anyObject());
    return htable;
  }
View Full Code Here
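
The fragment above is the tail of an anonymous Mockito Answer. A hedged sketch of how the full helper might look, assuming a failCnt parameter, static imports of org.mockito.Mockito.*, and the method name createIOEScannerTable mirroring the tests that call it below:

// Wrap the real table in a Mockito spy whose getScanner() throws an IOException
// for the first failCnt calls and then delegates to the real implementation.
static Table createIOEScannerTable(byte[] name, final int failCnt) throws IOException {
  Answer<ResultScanner> a = new Answer<ResultScanner>() {
    int cnt = 0;

    @Override
    public ResultScanner answer(InvocationOnMock invocation) throws Throwable {
      if (cnt++ < failCnt) {
        throw new IOException("injected scanner failure " + cnt);
      }
      // otherwise return the real scanner.
      return (ResultScanner) invocation.callRealMethod();
    }
  };

  Table htable = spy(createTable(name));
  doAnswer(a).when(htable).getScanner((Scan) anyObject());
  return htable;
}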


   *
   * @throws IOException
   */
  @Test
  public void testTableRecordReader() throws IOException {
    Table table = createTable("table1".getBytes());
    runTestMapred(table);
  }
View Full Code Here

   *
   * @throws IOException
   */
  @Test
  public void testTableRecordReaderScannerFail() throws IOException {
    Table htable = createIOEScannerTable("table2".getBytes(), 1);
    runTestMapred(htable);
  }
View Full Code Here

   *
   * @throws IOException
   */
  @Test(expected = IOException.class)
  public void testTableRecordReaderScannerFailTwice() throws IOException {
    Table htable = createIOEScannerTable("table3".getBytes(), 2);
    runTestMapred(htable);
  }
View Full Code Here

   *
   * @throws org.apache.hadoop.hbase.DoNotRetryIOException
   */
  @Test
  public void testTableRecordReaderScannerTimeout() throws IOException {
    Table htable = createDNRIOEScannerTable("table4".getBytes(), 1);
    runTestMapred(htable);
  }
View Full Code Here
