Program program;
try {
  program = Programs.create(locationFactory.create(programLocation), classLoader);
  // If this MapReduce is launched from a Workflow, wrap the Program so it resolves to the Workflow's MapReduce.
  if (workflowBatch != null) {
    MapReduceSpecification mapReduceSpec = program.getSpecification().getMapReduce().get(workflowBatch);
    Preconditions.checkArgument(mapReduceSpec != null, "Cannot find MapReduceSpecification for %s", workflowBatch);
    program = new WorkflowMapReduceProgram(program, mapReduceSpec);
  }
} catch (IOException e) {
  LOG.error("Could not initialize Program from location: {}", programLocation);
  throw Throwables.propagate(e);
}
// Initialize the dataset framework and hook it up with the MapReduce job transaction
DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
CConfiguration configuration = injector.getInstance(CConfiguration.class);
ApplicationSpecification programSpec = program.getSpecification();
// If this is not for a mapper or a reducer, we don't need the metrics collection service
MetricsCollectionService metricsCollectionService =
  (type == null) ? null : injector.getInstance(MetricsCollectionService.class);
DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
// Create the MapReduce job context
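// The spec is looked up by program name; when launched from a Workflow, the WorkflowMapReduceProgram
// wrapper created above is expected to resolve this lookup to the Workflow's MapReduce.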
MapReduceSpecification spec = program.getSpecification().getMapReduce().get(program.getName());
BasicMapReduceContext context =
  new BasicMapReduceContext(program, type, RunIds.fromString(runId),
                            runtimeArguments, programSpec.getDatasets().keySet(), spec, logicalStartTime,
                            workflowBatch, discoveryServiceClient, metricsCollectionService,
                            datasetFramework, configuration);