  }

  /** Verifies that a single pipeline can write HFiles to multiple HBase tables. See CRUNCH-251. */
  @Test
  public void testMultipleHFileTargets() throws Exception {
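    // Stage the input text on the mini-cluster's HDFS and set up two separate
    // output paths and two target tables (createTable's argument is presumably
    // the number of region splits).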
    Pipeline pipeline = new MRPipeline(HFileTargetIT.class, HBASE_TEST_UTILITY.getConfiguration());
    Path inputPath = copyResourceFileToHDFS("shakes.txt");
    Path outputPath1 = getTempPathOnHDFS("out1");
    Path outputPath2 = getTempPathOnHDFS("out2");
    HTable table1 = createTable(26);
    HTable table2 = createTable(26);
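    // LoadIncrementalHFiles is HBase's bulk-load tool; it moves completed
    // HFiles directly into the target table's regions.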
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(HBASE_TEST_UTILITY.getConfiguration());
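    // Split the text into words and partition them into short and long words,
    // producing an independent count table for each partition.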
    PCollection<String> shakespeare = pipeline.read(At.textFile(inputPath, Writables.strings()));
    PCollection<String> words = split(shakespeare, "\\s+");
    PCollection<String> shortWords = words.filter(SHORT_WORD_FILTER);
    PCollection<String> longWords = words.filter(FilterFns.not(SHORT_WORD_FILTER));
    PTable<String, Long> shortWordCounts = shortWords.count();
    PTable<String, Long> longWordCounts = longWords.count();
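    // Write each count table as Puts rendered into HFiles under its own output
    // path, partitioned to line up with the corresponding table's regions.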
    HFileUtils.writePutsToHFilesForIncrementalLoad(
        convertToPuts(shortWordCounts),
        table1,
        outputPath1);
    HFileUtils.writePutsToHFilesForIncrementalLoad(
        convertToPuts(longWordCounts),
        table2,
        outputPath2);
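    // A single run materializes both HFile targets.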
    PipelineResult result = pipeline.run();
    assertTrue(result.succeeded());
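    // Only after the pipeline has succeeded are the HFiles bulk-loaded into HBase.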
    loader.doBulkLoad(outputPath1, table1);
    loader.doBulkLoad(outputPath2, table2);
  }