// use bulkload
// Write a single HFile to a temp path so it can be bulk-loaded below.
// NOTE(review): 'l' is presumably a timestamp/sequence value defined earlier — confirm against caller.
final Path hfilePath = writeToHFile(l, "/temp/testBulkLoadWithParallelScan/",
"/temp/testBulkLoadWithParallelScan/col/file", false);
Configuration conf = TEST_UTIL.getConfiguration();
// Force sequence-number assignment on bulk load so the loaded file gets a
// sequence id — the behavior this test exercises against an open scanner.
conf.setBoolean("hbase.mapreduce.bulkload.assign.sequenceNumbers", true);
final LoadIncrementalHFiles bulkload = new LoadIncrementalHFiles(conf);
// Open the scanner BEFORE the bulk load runs, so the scan is in progress
// while the HFile is loaded on the background thread.
ResultScanner scanner = table.getScanner(scan);
// Create a scanner and then do bulk load
// Latch lets the main test thread wait until the background put + bulk load finish.
final CountDownLatch latch = new CountDownLatch(1);
new Thread() {
public void run() {
    // Background work: insert one extra row, then bulk-load the prepared HFile
    // while the main thread's scanner is still open.
    try {
        Put put1 = new Put(Bytes.toBytes("row5"));
        put1.add(new KeyValue(Bytes.toBytes("row5"), Bytes.toBytes("col"), Bytes.toBytes("q"), l,
            Bytes.toBytes("version0")));
        table.put(put1);
        table.flushCommits();
        bulkload.doBulkLoad(hfilePath, table);
    } catch (TableNotFoundException e) {
        // Was silently swallowed before; rethrow with cause so a missing
        // table surfaces as a test failure instead of disappearing.
        throw new RuntimeException("bulk load target table not found", e);
    } catch (IOException e) {
        throw new RuntimeException("bulk load failed", e);
    } finally {
        // Count down unconditionally: the original only counted down on
        // success, so any exception left the waiting test thread blocked
        // on the latch forever (test hang instead of test failure).
        latch.countDown();
    }
}