// NOTE(review): this is the interior of a larger method — the enclosing try
// block opens before this view and the catch body continues past it. Only
// comments are changed here; all code is byte-identical to the original.

// In test mode there is no MapReduce context, so loaderName is presumably
// injected elsewhere (TODO confirm with the test harness); in normal runs it
// is read from the job configuration.
Configuration conf = null;
if (!isTest) {
conf = context.getConfiguration();
loaderName = conf.get(JobConstants.JOB_ETL_LOADER);
}
// Instantiate the Loader reflectively from its class name.
Loader loader = (Loader) ClassUtils.instantiate(loaderName);
// Objects that should be passed to the Executor execution
PrefixContext subContext = null;
Object configConnection = null;
Object configJob = null;
Schema schema = null;
if (!isTest) {
// Propagate connector schema in every case for now
// TODO: Change to conditional choosing between HIO and Connector schema
schema = ConfigurationUtils.getConnectorSchema(conf);
// The configuration source for the loader depends on job direction:
// EXPORT loads through the connector, IMPORT through the framework.
switch (ConfigurationUtils.getJobType(conf)) {
case EXPORT:
// Connector-side context/configs, namespaced under the connector prefix.
subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
configConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
configJob = ConfigurationUtils.getConfigConnectorJob(conf);
break;
case IMPORT:
// Framework-side configs; the empty prefix exposes the raw configuration.
subContext = new PrefixContext(conf, "");
configConnection = ConfigurationUtils.getConfigFrameworkConnection(conf);
configJob = ConfigurationUtils.getConfigFrameworkJob(conf);
break;
default:
// Unknown job type — fail fast rather than run with a null context.
throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
}
}
// Create loader context
LoaderContext loaderContext = new LoaderContext(subContext, reader, schema);
LOG.info("Running loader class " + loaderName);
loader.load(loaderContext, configConnection, configJob);
LOG.info("Loader has finished");
} catch (Throwable t) {
// Flag completion so the producer side can detect that the consumer died
// (set before logging/release so the writer never blocks forever).
readerFinished = true;
LOG.error("Error while loading data out of MR job.", t);
// Release so that the writer can tell the framework something went