                if (jobMap.containsKey(hadoopJobId)) {
                    setJobState(jobMap.get(hadoopJobId), JobMetadata.DONE);
                } else {
                    MetaGraphTx tx = metaGraph.newTransaction();
                    JobMetadata jobMetadata = tx.getJob(jobId);
                    JobMetadata childJobMetadata = tx.createJob(jobMetadata);
                    childJobMetadata.setName("faunusHadoopJob");
                    childJobMetadata.setState(JobMetadata.DONE);
                    childJobMetadata.setProgress(1.0f);
                    childJobMetadata.setMapreduceJobId(hadoopJobId.toString());
                    tx.commit();
                    jobMap.put(hadoopJobId, childJobMetadata.getId());
                }
            }
        }
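        // Mark any failed Hadoop jobs as ERROR, creating a child job metadata record for jobs not seen before.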
        for (Job hadoopJob: failedJobs) {
            JobID hadoopJobId = hadoopJob.getJobID();
            logger.debug("found failed hadoop job:", hadoopJobId.toString());
            if (jobMap.containsKey(hadoopJobId)) {
                setJobState(jobMap.get(hadoopJobId), JobMetadata.ERROR);
            } else {
                MetaGraphTx tx = metaGraph.newTransaction();
                JobMetadata jobMetadata = tx.getJob(jobId);
                JobMetadata childJobMetadata = tx.createJob(jobMetadata);
                childJobMetadata.setName("faunusHadoopJob");
                childJobMetadata.setMapreduceJobId(hadoopJobId.toString());
                childJobMetadata.setState(JobMetadata.ERROR);
                tx.commit();
                jobMap.put(hadoopJobId, childJobMetadata.getId());
            }
        }
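        // Overall progress: the number of successful jobs as a fraction of the jobs in progress.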
        float totalProgress;
        // Don't divide by zero if we don't have any jobs in progress.
        if (jobsInProgress.isEmpty()) {
            totalProgress = 1;
        } else {
            totalProgress = ((float) successfulJobs.size()) / ((float) jobsInProgress.size());
        }
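        // Fold in the partial progress of the currently running Hadoop job, if there is one.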
        Job hadoopRunningJob = jobControl.getRunningJob();
        if (hadoopRunningJob != null) {
            JobID hadoopJobId = hadoopRunningJob.getJobID();
            logger.debug("found active hadoop job:", hadoopJobId.toString());
            JobStatus status = hadoopRunningJob.getStatus();
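            // Approximate this job's progress as the average of its setup, map, reduce, and cleanup phases.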
            float progress = 0.25f * (
                    status.getSetupProgress() +
                            status.getMapProgress() +
                            status.getReduceProgress() +
                            status.getCleanupProgress());
            logger.debug("found progress: "
                    + status.getSetupProgress() + " "
                    + status.getMapProgress() + " "
                    + status.getReduceProgress() + " "
                    + status.getCleanupProgress() + " "
                    + progress);
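            // Update the tracked child job's progress, creating its metadata record the first time this job is seen.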
            if (jobMap.containsKey(hadoopJobId)) {
                setJobProgress(jobMap.get(hadoopJobId), progress);
            } else {
                MetaGraphTx tx = metaGraph.newTransaction();
                JobMetadata jobMetadata = tx.getJob(jobId);
                JobMetadata childJobMetadata = tx.createJob(jobMetadata);
                childJobMetadata.setName("faunusHadoopJob");
                childJobMetadata.setProgress(progress);
                childJobMetadata.setState(JobMetadata.RUNNING);
                childJobMetadata.setMapreduceJobId(hadoopJobId.toString());
                tx.commit();
                jobMap.put(hadoopJobId, childJobMetadata.getId());
            }
            JobMetadata.Id jobMetadataId = jobMap.get(hadoopJobId);
            setJobProgress(jobMetadataId, progress);
            totalProgress += (progress / ((float) jobsInProgress.size()));