}
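// Write the materialized RDD to each of the configured output targets.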
for (Target t : targets) {
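  // Each target gets its own copy of the pipeline Configuration; the serialized
  // copy is broadcast so that functions running on the executors can read it.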
  Configuration conf = new Configuration(getConfiguration());
  getRuntimeContext().setConf(sparkContext.broadcast(WritableUtils.toByteArray(conf)));
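  // Only targets that understand MapReduce output formats can be written from Spark.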
  if (t instanceof MapReduceTarget) { // TODO: check this earlier
    Converter c = t.getConverter(ptype);
    IdentityFn ident = IdentityFn.getInstance();
    JavaPairRDD<?, ?> outRDD;
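    // Apply the PType's output map function (when the converter calls for it),
    // then turn each element into the key/value pair the output format expects.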
    if (rdd instanceof JavaRDD) {
      outRDD = ((JavaRDD) rdd)
          .map(new MapFunction(c.applyPTypeTransforms() ? ptype.getOutputMapFn() : ident, ctxt))
          .mapToPair(new OutputConverterFunction(c));
    } else {
      outRDD = ((JavaPairRDD) rdd)
          .map(new PairMapFunction(c.applyPTypeTransforms() ? ptype.getOutputMapFn() : ident, ctxt))
          .mapToPair(new OutputConverterFunction(c));
    }
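    // A short-lived Job instance is used purely as a carrier for the output
    // format class and configuration that the target sets up.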
    try {
      Job job = Job.getInstance(conf);
      if (t instanceof PathTarget) {
        PathTarget pt = (PathTarget) t;
        pt.configureForMapReduce(job, ptype, pt.getPath(), null);
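        // Write to a temporary path first; handleOutputs then promotes the
        // committed files to the target's final location.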
        Path tmpPath = pipeline.createTempPath();
        outRDD.saveAsNewAPIHadoopFile(
            tmpPath.toString(),
            c.getKeyClass(),
            c.getValueClass(),
            job.getOutputFormatClass(),
            job.getConfiguration());
        pt.handleOutputs(job.getConfiguration(), tmpPath, -1);
      } else if (t instanceof MapReduceTarget) {
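        // Non-path targets configure the Job for the write themselves.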
        MapReduceTarget mrt = (MapReduceTarget) t;