// Propagate the cache-archive visibility flags from the outer job's
// configuration into the child job's conf (empty string when unset),
// so the child job sees the same distributed-cache visibility settings.
conf.set("mapreduce.job.cache.archives.visibilities",
context.getConfiguration().get("mapreduce.job.cache.archives.visibilities", ""));
// can't use IntegrationTest instance of util because it hasn't been
// instantiated on the JVM running this method. Create our own.
IntegrationTestingUtility util =
new IntegrationTestingUtility(conf);
// this is why we're here: launch a child job. The rest of this should
// look a lot like TestImportTsv#testMROnTable.
// Child table name embeds the parent job ID so concurrent/repeated runs
// don't collide on the same table.
final String table = format("%s-%s-child", NAME, context.getJobID());
final String cf = "FAM";
// Hand the parent task's delegation-token file to the child ImportTsv job
// so it can authenticate on a secure cluster.
// NOTE(review): System.getenv may return null if HADOOP_TOKEN_FILE_LOCATION
// is not set (e.g. insecure cluster); conf.set would then throw NPE — confirm
// this path only runs where the token file is guaranteed present.
String fileLocation = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
conf.set(ImportTsv.CREDENTIALS_LOCATION, fileLocation);
// ImportTsv arguments: column mapping (row key + two qualifiers in family
// FAM) and the field separator (ESC, \u001b), followed by the target table.
String[] args = {
"-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
"-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
table
};
try {
util.createTable(table, cf);
LOG.info("testRunFromOutputCommitter: launching child job.");
// Run the child MapReduce import (1 expected attempt/iteration per the
// TestImportTsv helper's contract — see doMROnTableTest).
TestImportTsv.doMROnTableTest(util, cf, null, args, 1);
} catch (Exception e) {
// Wrap any child-job failure as IOException so the surrounding
// commit machinery aborts the commit with the original cause attached.
throw new IOException("Underlying MapReduce job failed. Aborting commit.", e);
} finally {
// Best-effort cleanup: drop the child table if it was created.
// NOTE(review): the Admin from getHBaseAdmin() is not closed here, and a
// failure inside this finally block would mask an in-flight exception —
// consider try-with-resources / suppressed-exception handling upstream.
if (util.getHBaseAdmin().tableExists(TableName.valueOf(table))) {
util.deleteTable(table);
}
}
}