  request.setOutputValueClass(NullWritable.class);

  Importer importer = (Importer)request.getConnectorCallbacks();
  // Set up framework context with the connector-provided ETL classes
  MutableMapContext context = request.getFrameworkContext();
  context.setString(JobConstants.JOB_ETL_PARTITIONER, importer.getPartitioner().getName());
  context.setString(JobConstants.JOB_ETL_EXTRACTOR, importer.getExtractor().getName());
  context.setString(JobConstants.JOB_ETL_DESTROYER, importer.getDestroyer().getName());

  // Propagate the requested number of extractors, if it was specified
  if(request.getExtractors() != null) {
    context.setInteger(JobConstants.JOB_ETL_EXTRACTOR_NUM, request.getExtractors());
  }
  // TODO: These settings should be abstracted to the core module at some point
  // Choose the loader implementation based on the requested output format
  if(jobConf.output.outputFormat == OutputFormat.TEXT_FILE) {
    context.setString(JobConstants.JOB_ETL_LOADER, HdfsTextImportLoader.class.getName());
  } else if(jobConf.output.outputFormat == OutputFormat.SEQUENCE_FILE) {
    context.setString(JobConstants.JOB_ETL_LOADER, HdfsSequenceImportLoader.class.getName());
  } else {
    throw new SqoopException(MapreduceExecutionError.MAPRED_EXEC_0024,
      "Format: " + jobConf.output.outputFormat);
  }
  // Enable output compression when a codec has been configured
  String codecName = getCompressionCodecName(jobConf);
  if(codecName != null) {
    context.setString(JobConstants.HADOOP_COMPRESS_CODEC, codecName);
    context.setBoolean(JobConstants.HADOOP_COMPRESS, true);
  }
}