JobConf jobConf = job.getJobConf();
RunningJob rj = null;
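// Fetch the RunningJob handle from the JobClient; it is needed below to read the job's counters.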
try {
    rj = jobClient.getJob(job.getAssignedJobID());
} catch (IOException e1) {
    String error = "Unable to get the job statistics from JobClient.";
    throw new ExecException(error, e1);
}
if (rj == null)
    continue;
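// Record this job's statistics in a per-job map keyed by its Hadoop job id.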
Map<String, String> jobStats = new HashMap<String, String>();
stats.put(job.getAssignedJobID().toString(), jobStats);
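// Deserialize the map, combine, and reduce plans stored in the JobConf and capture their
// string forms; the combine and reduce plans may be absent, hence the null checks.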
try {
    PhysicalPlan plan = (PhysicalPlan) ObjectSerializer.deserialize(jobConf.get("pig.mapPlan"));
    jobStats.put("PIG_STATS_MAP_PLAN", plan.toString());
    plan = (PhysicalPlan) ObjectSerializer.deserialize(jobConf.get("pig.combinePlan"));
    if (plan != null) {
        jobStats.put("PIG_STATS_COMBINE_PLAN", plan.toString());
    }
    plan = (PhysicalPlan) ObjectSerializer.deserialize(jobConf.get("pig.reducePlan"));
    if (plan != null) {
        jobStats.put("PIG_STATS_REDUCE_PLAN", plan.toString());
    }
} catch (IOException e2) {
    String error = "Error deserializing plans from the JobConf.";
    throw new RuntimeException(error, e2);
}
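// Retrieve the Hadoop counters for this job; the remaining statistics are read from them.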
Counters counters = null;
try {
    counters = rj.getCounters();
    // If counters is null, all of the stats are unknown; -1 is used to indicate an unknown
    // counter value. Counters should not actually be null; a null value is a Hadoop bug, and
    // once that bug is fixed in Hadoop, the null-handling code should never be hit.
    // See PIG-943.
    if (counters != null)