}
/**
 * Builds a {@link JobScript} that mirrors the given Hadoop script within the given
 * execution context.
 * <p>
 * Hadoop properties and environment variables are merged so that entries coming from
 * the script take precedence over those derived from the context, and the tracking ID
 * property is always set last.
 * </p>
 * @param context the current execution context (batch/flow/execution/phase/arguments)
 * @param script the Hadoop script to convert
 * @return the converted job script
 * @throws InterruptedException if interrupted while computing derived values
 * @throws IOException if failed to resolve properties for the script
 */
private JobScript convert(ExecutionContext context, HadoopScript script) throws InterruptedException, IOException {
    assert context != null;
    assert script != null;
    JobScript job = new JobScript();
    job.setBatchId(context.getBatchId());
    job.setFlowId(context.getFlowId());
    job.setExecutionId(context.getExecutionId());
    job.setPhase(context.getPhase());
    job.setStageId(script.getId());
    job.setMainClassName(script.getClassName());
    job.setArguments(new HashMap<String, String>(context.getArguments()));

    // merge order matters: script-level properties override handler-level ones,
    // and the tracking ID is forced on top of both
    Map<String, String> properties = new HashMap<String, String>(getProperties(context, script));
    properties.putAll(script.getHadoopProperties());
    properties.put(HadoopScriptUtil.PROP_TRACKING_ID, Job.computeTrackingId(context, script));
    job.setProperties(properties);

    // NOTE: Handler has only dummy environment variables
    // env.putAll(getEnvironmentVariables(context, script));
    Map<String, String> environment = new HashMap<String, String>(context.getEnvironmentVariables());
    environment.putAll(script.getEnvironmentVariables());
    job.setEnvironmentVariables(environment);
    return job;
}