      // Ask the cluster how many reduce slots are available (MR1 JobClient API;
      // the equivalent YARN call is kept below, commented out, for reference).
      int reducers = new JobClient(pipeline.getConfiguration()).getClusterStatus().getMaxReduceTasks(); // MR1
      //reducers = job.getCluster().getClusterStatus().getReduceSlotCapacity(); // Yarn only
      LOG.info("Cluster reports {} reduce slots", reducers);
    } else if (opts.pipelineType == PipelineType.spark) {
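      // Name the Spark application after this tool unless the user already set
      // spark.app.name to something other than the default fully qualified class name.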
      SparkConf sconf = new SparkConf();
      if (!sconf.contains("spark.app.name") || sconf.get("spark.app.name").equals(getClass().getName())) {
        sconf.setAppName(Utils.getShortClassName(getClass()));
      }
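      // Create the Spark context and wrap it in a Crunch SparkPipeline that reuses
      // this tool's Hadoop configuration.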
      JavaSparkContext sparkContext = new JavaSparkContext(sconf);
      pipeline = new SparkPipeline(sparkContext, sparkContext.appName());
      pipeline.setConfiguration(getConf());
    } else {