ProgramType processorType = program.getType();
Preconditions.checkNotNull(processorType, "Missing processor type.");
Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only SPARK program type is supported.");
final SparkSpecification spec = appSpec.getSpark().get(program.getName());
Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
// Optionally get the runId. If this Spark program was started by another program (e.g. a Workflow), it inherits that runId.
Arguments arguments = options.getArguments();
RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID)
  ? RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID))
  : RunIds.generate();
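// Inherit the logical start time when the caller supplies one; otherwise default to the current wall-clock time.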
long logicalStartTime = arguments.hasOption(ProgramOptionConstants.LOGICAL_START_TIME)
  ? Long.parseLong(arguments.getOption(ProgramOptionConstants.LOGICAL_START_TIME))
  : System.currentTimeMillis();
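// Identifies the enclosing Workflow batch when this program is launched from a Workflow.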
String workflowBatch = arguments.getOption(ProgramOptionConstants.WORKFLOW_BATCH);
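// Reflectively instantiate the user's Spark program class; any failure here is fatal.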
Spark spark;
try {
spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
} catch (Exception e) {
LOG.error("Failed to instantiate Spark class {}", spec.getClassName(), e);
throw Throwables.propagate(e);
}
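// Build the runtime context that carries the program, run id, user arguments, dataset names and spec into execution.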
final BasicSparkContext context = new BasicSparkContext(program, runId, options.getUserArguments(),
program.getSpecification().getDatasets().keySet(), spec,