@Override
public void run(Context context) throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();

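  // The Extractor implementation is configured by class name and loaded
  // reflectively, so each connector can plug in its own extraction logic.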
  String extractorName = conf.get(JobConstants.JOB_ETL_EXTRACTOR);
  Extractor extractor = (Extractor) ClassUtils.instantiate(extractorName);

  // Objects that will be passed to the Extractor execution
  PrefixContext subContext = null;
  Object configConnection = null;
  Object configJob = null;

  // Propagate connector schema in every case for now
  // TODO: Change to conditional choosing between HIO and Connector schema
  Schema schema = ConfigurationUtils.getConnectorSchema(conf);
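  // The schema is handed to the extractor below through the ExtractorContext.
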
  // Extractor is in connector space for IMPORT and in framework space for EXPORT
  switch (ConfigurationUtils.getJobType(conf)) {
    case IMPORT:
      subContext = new PrefixContext(conf, JobConstants.PREFIX_CONNECTOR_CONTEXT);
      configConnection = ConfigurationUtils.getConfigConnectorConnection(conf);
      configJob = ConfigurationUtils.getConfigConnectorJob(conf);
      break;
    case EXPORT:
      // Framework configuration is stored unprefixed, hence the empty prefix
      subContext = new PrefixContext(conf, "");
      configConnection = ConfigurationUtils.getConfigFrameworkConnection(conf);
      configJob = ConfigurationUtils.getConfigFrameworkJob(conf);
      break;
    default:
      throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0023);
  }

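  // The input format delivers the partition to process to each map task,
  // wrapped in a SqoopSplit key.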
  SqoopSplit split = context.getCurrentKey();
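  // MapDataWriter adapts the mapper context so that rows emitted by the
  // extractor are forwarded to the job's output.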
  ExtractorContext extractorContext =
      new ExtractorContext(subContext, new MapDataWriter(context), schema);

  try {
    LOG.info("Starting progress service");
    // Report progress every two minutes so that the framework does not kill
    // the task for inactivity during a long-running extraction
    progressService.scheduleAtFixedRate(new ProgressRunnable(context), 0, 2, TimeUnit.MINUTES);

    LOG.info("Running extractor class " + extractorName);
    extractor.extract(extractorContext, configConnection, configJob, split.getPartition());
    LOG.info("Extractor has finished");
    context.getCounter(SqoopCounters.ROWS_READ)
        .increment(extractor.getRowsRead());
  } catch (Exception e) {
    throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0017, e);
  } finally {
    // Always stop the progress reporter, even if extraction failed
    LOG.info("Stopping progress service");
    progressService.shutdown();
  }
}
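
// Sketch of the producing side (an assumption, not shown in this class): job
// submission stores the extractor class name in the configuration, which is
// what conf.get(JobConstants.JOB_ETL_EXTRACTOR) reads back above. MyExtractor
// here is a hypothetical Extractor implementation:
//
//   conf.set(JobConstants.JOB_ETL_EXTRACTOR, MyExtractor.class.getName());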