// Use the new (org.apache.hadoop.mapreduce) API if the job was configured
// with the new mapper API, and record whether this output belongs to a
// map-style processor.
this.useNewApi = this.jobConf.getUseNewMapper();
this.isMapperOutput = jobConf.getBoolean(MRConfig.IS_MAP_PROCESSOR,
    false);
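
// Make the DAG attempt number visible to the MR framework as the
// application attempt id.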
jobConf.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID,
    getContext().getDAGAttemptNumber());
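
// Build a mock MapReduce TaskAttemptID from the Tez task context so that
// MR OutputFormats and committers see MR-style identifiers.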
TaskAttemptID taskAttemptId = org.apache.tez.mapreduce.hadoop.mapreduce.TaskAttemptContextImpl
    .createMockTaskAttemptID(getContext().getApplicationId().getClusterTimestamp(),
        getContext().getTaskVertexIndex(), getContext().getApplicationId().getId(),
        getContext().getTaskIndex(), getContext().getTaskAttemptNumber(), isMapperOutput);
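
// Publish the synthesized ids into the job conf under the standard MR keys,
// mirroring what the MR framework sets for a real task attempt.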
jobConf.set(JobContext.TASK_ATTEMPT_ID, taskAttemptId.toString());
jobConf.set(JobContext.TASK_ID, taskAttemptId.getTaskID().toString());
jobConf.setBoolean(JobContext.TASK_ISMAP, isMapperOutput);
jobConf.setInt(JobContext.TASK_PARTITION,
    taskAttemptId.getTaskID().getId());
jobConf.set(JobContext.ID, taskAttemptId.getJobID().toString());
if (useNewApi) {
  // Set the output part name to have a unique prefix, unless the job
  // already configured one.
  if (jobConf.get("mapreduce.output.basename") == null) {
    jobConf.set("mapreduce.output.basename", getOutputFileNamePrefix());