Package org.apache.hadoop.tools.rumen

Examples of org.apache.hadoop.tools.rumen.ZombieJob


      GridmixJobStory gjs = new GridmixJobStory(new Path(tracePath),
                                                rtClient.getDaemonConf());
      Set<JobID> jobids = gjs.getZombieJobs().keySet();
      boolean isHighRamFlag = false;
      for (JobID jobid :jobids) {
        ZombieJob zombieJob = gjs.getZombieJobs().get(jobid);
        JobConf origJobConf = zombieJob.getJobConf();
        int origMapFactor =
            GridmixJobVerification.getMapFactor(origJobConf);
        int origReduceFactor =
            GridmixJobVerification.getReduceFactor(origJobConf);
        if (origMapFactor >= 2 || origReduceFactor >= 2) {
View Full Code Here


  }
 
  private Map<JobID, ZombieJob> buildJobStories() throws IOException {
    ZombieJobProducer zjp = new ZombieJobProducer(path,null, conf);
    Map<JobID, ZombieJob> hm = new HashMap<JobID, ZombieJob>();
    ZombieJob zj = zjp.getNextJob();
    while (zj != null) {
      hm.put(zj.getJobID(),zj);
      zj = zjp.getNextJob();
    }
    if (hm.size() == 0) {
      return null;
    } else {
View Full Code Here

      GridmixJobStory gjs = new GridmixJobStory(new Path(tracePath),
                                                rtClient.getDaemonConf());
      Set<JobID> jobids = gjs.getZombieJobs().keySet();
      boolean isHighRamFlag = false;
      for (JobID jobid :jobids) {
        ZombieJob zombieJob = gjs.getZombieJobs().get(jobid);
        JobConf origJobConf = zombieJob.getJobConf();
        int origMapFactor =
            GridmixJobVerification.getMapFactor(origJobConf);
        int origReduceFactor =
            GridmixJobVerification.getReduceFactor(origJobConf);
        if (origMapFactor >= 2 || origReduceFactor >= 2) {
View Full Code Here

      if (userResolverVal == null) {
        userResolverVal = simuJobConf.get(usrResolver);
      }
   
      ZombieJob zombieJob = gjs.getZombieJob(JobID.forName(origJobId));
      Map<String, Long> mapJobCounters = getJobMapCounters(zombieJob);
      Map<String, Long> reduceJobCounters = getJobReduceCounters(zombieJob);
      if (simuJobConf.get(jobSubKey).contains("REPLAY")) {
          origSubmissionTime.put(zombieJob.getSubmissionTime(),
                                 origJobId.toString() + "^" + simuJobId);
          simuSubmissionTime.put(Long.parseLong(jhInfo.getValues().get(JobHistory.Keys.SUBMIT_TIME)),
                                 origJobId.toString() + "^" + simuJobId); ;
      }

      LOG.info("Verifying the job <" + simuJobId + "> and wait for a while...");
      verifySimulatedJobSummary(zombieJob, jhInfo, simuJobConf);
      verifyJobMapCounters(counters, mapJobCounters, simuJobConf);
      verifyJobReduceCounters(counters, reduceJobCounters, simuJobConf);
      verifyCompressionEmulation(zombieJob.getJobConf(), simuJobConf, counters,
                                 reduceJobCounters, mapJobCounters);
      verifyDistributeCache(zombieJob,simuJobConf);
      setJobDistributedCacheInfo(simuJobId.toString(), simuJobConf,
         zombieJob.getJobConf());
      verifyHighRamMemoryJobs(zombieJob, simuJobConf);
      verifyCPUEmulationOfJobs(zombieJob, jhInfo, simuJobConf);
      verifyMemoryEmulationOfJobs(zombieJob, jhInfo, simuJobConf);
      LOG.info("Done.");
    }
View Full Code Here

              + origJobId + "<->" + simuJobId);

      if (userResolverVal == null) {
        userResolverVal = simuJobConf.get(usrResolver);
      }
      ZombieJob zombieJob = gjs.getZombieJob(JobID.forName(origJobId));
      Map<String, Long> mapJobCounters = getJobMapCounters(zombieJob);
      Map<String, Long> reduceJobCounters = getJobReduceCounters(zombieJob);
      if (simuJobConf.get(jobSubKey).contains("REPLAY")) {
          origSubmissionTime.put(zombieJob.getSubmissionTime(),
                                 origJobId.toString() + "^" + simuJobId);
          simuSubmissionTime.put(jhInfo.getSubmitTime() ,
                                 origJobId.toString() + "^" + simuJobId); ;
      }

      LOG.info("Verifying the job <" + simuJobId + "> and wait for a while...");
      verifySimulatedJobSummary(zombieJob, jhInfo, simuJobConf);
      verifyJobMapCounters(counters, mapJobCounters, simuJobConf);
      verifyJobReduceCounters(counters, reduceJobCounters, simuJobConf);
      verifyCompressionEmulation(zombieJob.getJobConf(), simuJobConf, counters,
                                 reduceJobCounters, mapJobCounters);
      verifyDistributeCache(zombieJob,simuJobConf);
      setJobDistributedCacheInfo(simuJobId.toString(), simuJobConf,
         zombieJob.getJobConf());
      verifyHighRamMemoryJobs(zombieJob, simuJobConf);
      verifyCPUEmulationOfJobs(zombieJob, jhInfo, simuJobConf);
      verifyMemoryEmulationOfJobs(zombieJob, jhInfo, simuJobConf);
      LOG.info("Done.");
    }
View Full Code Here

              + origJobId + "<->" + simuJobId);

      if (userResolverVal == null) {
        userResolverVal = simuJobConf.get(usrResolver);
      }
      ZombieJob zombieJob = gjs.getZombieJob(JobID.forName(origJobId));
      Map<String, Long> mapJobCounters = getJobMapCounters(zombieJob);
      Map<String, Long> reduceJobCounters = getJobReduceCounters(zombieJob);
      if (simuJobConf.get(jobSubKey).contains("REPLAY")) {
          origSubmissionTime.put(zombieJob.getSubmissionTime(),
                                 origJobId.toString() + "^" + simuJobId);
          simuSubmissionTime.put(jhInfo.getSubmitTime() ,
                                 origJobId.toString() + "^" + simuJobId); ;
      }

      LOG.info("Verifying the job <" + simuJobId + "> and wait for a while...");
      verifySimulatedJobSummary(zombieJob, jhInfo, simuJobConf);
      verifyJobMapCounters(counters, mapJobCounters, simuJobConf);
      verifyJobReduceCounters(counters, reduceJobCounters, simuJobConf);
      verifyCompressionEmulation(zombieJob.getJobConf(), simuJobConf, counters,
                                 reduceJobCounters, mapJobCounters);
      verifyDistributeCache(zombieJob,simuJobConf);
      setJobDistributedCacheInfo(simuJobId.toString(), simuJobConf,
         zombieJob.getJobConf());
      verifyHighRamMemoryJobs(zombieJob, simuJobConf);
      LOG.info("Done.");
    }
    verifyDistributedCacheBetweenJobs(simuAndOrigJobsInfo);
  }
View Full Code Here

   *
   * @throws IOException
   */
  private JobStory getNextJobFiltered() throws IOException {
    while (true) {
      ZombieJob job = producer.getNextJob();
      if (job == null) {
        return null;
      }
      if (job.getOutcome() == Pre21JobHistoryConstants.Values.KILLED) {
        continue;
      }
      if (job.getNumberMaps() == 0) {
        continue;
      }
      if (job.getNumLoggedMaps() == 0) {
        continue;
      }
      return job;
    }
  }
View Full Code Here

 
  private int jobBufferSize = 100;
 
  private void initBuffer() throws IOException {
    for (int i = 0; i < jobBufferSize; ++i) {
      ZombieJob job = producer.getNextJob();
      if (job == null) {
        isProducerEmpty = true;
        break;
      }
      jobBuffer.add(job);
View Full Code Here

    initBuffer();
  }

  @Override
  public ZombieJob getNextJob() throws IOException {
    ZombieJob job;
    if (!isProducerEmpty) {
      job = producer.getNextJob();
      if (job != null)
        jobBuffer.add(job);
      else
View Full Code Here

   *
   * @throws IOException
   */
  /**
   * Returns the next job from the producer that passes the usability filter.
   *
   * <p>Jobs are skipped when their outcome is KILLED, when they declare zero
   * map tasks, or when zero map tasks were logged — such entries provide no
   * workload worth replaying.
   *
   * @return the next acceptable job, or {@code null} when the producer has
   *         no more jobs
   * @throws IOException if reading from the producer fails
   */
  private JobStory getNextJobFiltered() throws IOException {
    while (true) {
      ZombieJob job = producer.getNextJob();
      if (job == null) {
        // Trace exhausted.
        return null;
      }
      if (job.getOutcome() == Pre21JobHistoryConstants.Values.KILLED) {
        // Killed jobs carry no meaningful completion data; skip.
        continue;
      }
      if (job.getNumberMaps() == 0) {
        // No declared map tasks — nothing to simulate.
        continue;
      }
      if (job.getNumLoggedMaps() == 0) {
        // No logged map tasks — history lacks usable task records.
        continue;
      }
      return job;
    }
  }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.tools.rumen.ZombieJob

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.