c.getValueClass(),
job.getOutputFormatClass(),
job.getConfiguration());
// Commit the files written under tmpPath to the target's final location.
// NOTE(review): the -1 argument is a magic value — presumably "no output index /
// single output"; confirm against PathTarget.handleOutputs' contract.
pt.handleOutputs(job.getConfiguration(), tmpPath, -1);
} else if (t instanceof MapReduceTarget) {
MapReduceTarget mrt = (MapReduceTarget) t;
// Let the target install its OutputFormat/key/value classes on the Job.
// NOTE(review): hard-coded new Path("/tmp") here, while the PathTarget branch
// above uses the job-specific tmpPath — verify this is intentional and not a
// collision-prone shared working directory.
mrt.configureForMapReduce(job, ptype, new Path("/tmp"), null);
// Write the RDD through the Hadoop OutputFormat just configured on the job.
outRDD.saveAsHadoopDataset(new JobConf(job.getConfiguration()));
} else {
// Spark execution can only write to targets that know how to configure a
// MapReduce job; anything else is a programming error at plan time.
throw new IllegalArgumentException("Spark execution cannot handle non-MapReduceTarget: " + t);
}
// NOTE(review): broad catch — body is outside this view; ensure the cause is
// preserved (not just et.getMessage()) wherever it is rethrown or logged.
} catch (Exception et) {