LOG.error("Workflow with name " + newWorkflowName + " already exists!");
return null;
}
// Create a new workflow with a new name
HelixProperty workflowConfig = resourceConfigMap.get(origWorkflowName);
Map<String, String> wfSimpleFields = workflowConfig.getRecord().getSimpleFields();
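// The original DAG is serialized as JSON in the workflow's simple fields; deserialize it to walk the jobs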
JobDag jobDag = JobDag.fromJson(wfSimpleFields.get(WorkflowConfig.DAG));
Map<String, Set<String>> parentsToChildren = jobDag.getParentsToChildren();
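// The parent-to-child edges are retained so the clone's job dependencies can be rebuilt further down;
// start a builder for the copy under the new workflow name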
Workflow.Builder builder = new Workflow.Builder(newWorkflowName);
// Set the workflow expiry
builder.setExpiry(Long.parseLong(wfSimpleFields.get(WorkflowConfig.EXPIRY)));
// Set the schedule, if applicable
ScheduleConfig scheduleConfig;
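// A caller-supplied start time takes precedence over the original workflow's schedule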
if (newStartTime != null) {
scheduleConfig = ScheduleConfig.oneTimeDelayedStart(newStartTime);
} else {
scheduleConfig = parseScheduleFromConfigMap(wfSimpleFields);
}
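// Only attach a schedule if one was supplied or recovered from the original config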
if (scheduleConfig != null) {
builder.setScheduleConfig(scheduleConfig);
}
// Add each job back, as long as its original job config still exists
Set<String> namespacedJobs = jobDag.getAllNodes();
for (String namespacedJob : namespacedJobs) {
if (resourceConfigMap.containsKey(namespacedJob)) {
// Copy over job-level and task-level configs
String job = getDenamespacedJobName(origWorkflowName, namespacedJob);
HelixProperty jobConfig = resourceConfigMap.get(namespacedJob);
Map<String, String> jobSimpleFields = jobConfig.getRecord().getSimpleFields();
jobSimpleFields.put(JobConfig.WORKFLOW_ID, newWorkflowName); // overwrite the workflow name so the copied job points at the clone
for (Map.Entry<String, String> e : jobSimpleFields.entrySet()) {
builder.addConfig(job, e.getKey(), e.getValue());
}
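// Task-level configs live in the record's map fields; rebuild a TaskConfig for each entry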
Map<String, Map<String, String>> rawTaskConfigMap = jobConfig.getRecord().getMapFields();
List<TaskConfig> taskConfigs = Lists.newLinkedList();
for (Map<String, String> rawTaskConfig : rawTaskConfigMap.values()) {
TaskConfig taskConfig = TaskConfig.from(rawTaskConfig);
taskConfigs.add(taskConfig);
}