Package org.apache.hadoop.hbase.mapreduce

Examples of org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles.doBulkLoad()
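
Most of the examples on this page follow the same pattern: a MapReduce job writes HFiles to a staging directory (typically via HFileOutputFormat), and doBulkLoad() then hands those files off to the RegionServers of an existing table. The following is a minimal sketch of that pattern, not taken from any of the projects below; the class BulkLoadExample, the method bulkLoad, and the path and table names are hypothetical placeholders, and the pre-1.0 HTable client API is used to match the snippets that follow.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;

    public class BulkLoadExample {

      // Load HFiles that were already written under hfofDir (one sub-directory per
      // column family, e.g. produced by HFileOutputFormat) into the named table.
      static void bulkLoad(Configuration conf, Path hfofDir, String tableName)
          throws Exception {
        HTable table = new HTable(conf, tableName);
        try {
          LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
          // Installs the staged HFiles into the table's regions.
          loader.doBulkLoad(hfofDir, table);
        } finally {
          table.close();
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // "/tmp/bulkload-output" and "my_table" are placeholders for illustration.
        bulkLoad(conf, new Path("/tmp/bulkload-output"), "my_table");
      }
    }

doBulkLoad() splits any HFile that spans more than one region on the client side before retrying, so keeping the HFiles aligned with the table's region boundaries (as HFileOutputFormat's configureIncrementalLoad setup does) keeps the load fast.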


      HTable table = new HTable(conf, tableName);
      try {
        HBaseAdmin admin = new HBaseAdmin(TEST_UTIL.getConfiguration());
        // Wait until the table is online before attempting the bulk load.
        TEST_UTIL.waitTableEnabled(admin, tableName.getName());
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(loadPath, table);
      } finally {
        table.close();
      }
    }


                  return false;
              }
 
              LOG.info("Loading HFiles from {}", outputPath);
              LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
              loader.doBulkLoad(outputPath, htable);
              htable.close();
 
              LOG.info("Incremental load complete for table=" + tableName);
 
              LOG.info("Removing output directory {}", outputPath);

    // Ensure HBase has write access to familyDir's contents
    chmod(familyDir.toString());

    HTable table = new HTable(conf, TABLE);
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    loader.doBulkLoad(dir, table);

    assertEquals(expectedRows, HBaseTestUtil.countRows(table));

    // disable and drop the table only if the row-count verification succeeded
    admin.disableTable(TABLE);
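
The chmod above reflects a practical requirement of bulk loading: the HBase service user must be able to read the staged HFiles, since doBulkLoad() asks the RegionServers to take ownership of them. Below is a minimal sketch of opening up a staging family directory with the Hadoop FileSystem API; the class and method names and the 0777 mode are illustrative only, and production setups would normally use tighter permissions or secure bulk load.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class BulkLoadPermissions {
      // Make a staging family directory and the HFiles inside it accessible to
      // the HBase service user before calling doBulkLoad() (rough equivalent of
      // the chmod helper used in the test above).
      public static void openUp(Configuration conf, Path familyDir) throws Exception {
        FileSystem fs = familyDir.getFileSystem(conf);
        FsPermission open = new FsPermission((short) 0777);
        fs.setPermission(familyDir, open);
        for (FileStatus hfile : fs.listStatus(familyDir)) {
          fs.setPermission(hfile.getPath(), open);
        }
      }
    }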

        table2,
        outputPath2);

    PipelineResult result = pipeline.run();
    assertTrue(result.succeeded());
    // Bulk load each pipeline output directory into its corresponding table.
    loader.doBulkLoad(outputPath1, table1);
    loader.doBulkLoad(outputPath2, table2);

    assertEquals(396L, getWordCountFromTable(table1, "of"));
    assertEquals(427L, getWordCountFromTable(table2, "and"));
  }

    // Load generated HFiles into table
    try {
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles(
        job.getConfiguration());
      loader.doBulkLoad(bulkLoadDir, hTable);
    }
    catch (Exception e) {
      String errorMessage = String.format("Unrecoverable error while " +
        "performing the bulk load of files in [%s]",
        bulkLoadDir.toString());

      HTable table = new HTable(conf, tableName);
      try {
        TEST_UTIL.waitTableEnabled(tableName);
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(loadPath, table);
      } finally {
        table.close();
      }
    }
