Examples of JobHistoryParser
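
org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser reads a MapReduce job history file and exposes its contents as a JobHistoryParser.JobInfo. The snippets below all follow the same basic pattern: construct the parser from a FileSystem plus a path (or from an already-open FSDataInputStream), call parse(), and optionally check getParseException() for events that could not be read. Here is a minimal, self-contained sketch of that pattern; the class name and the command-line argument are illustrative and not taken from any of the snippets.

    import java.io.IOException;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.TaskID;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

    public class JobHistoryParserExample {
      public static void main(String[] args) throws IOException {
        // Location of a completed job's history file, passed on the command line.
        Path historyFile = new Path(args[0]);
        Configuration conf = new Configuration();

        // Build the parser against whichever file system owns the history file.
        JobHistoryParser parser = new JobHistoryParser(
            historyFile.getFileSystem(conf), historyFile.toUri().getPath());
        JobInfo jobInfo = parser.parse();

        // parse() returns whatever it managed to read; a parse problem is
        // reported separately rather than thrown.
        if (parser.getParseException() != null) {
          System.err.println("History file is incomplete: "
              + parser.getParseException());
        }

        System.out.println("Job status: " + jobInfo.getJobStatus());

        // Task-level information is keyed by TaskID.
        Map<TaskID, TaskInfo> tasks = jobInfo.getAllTasks();
        System.out.println("Tasks recorded: " + tasks.size());
      }
    }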


Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    // Derive the job id from the history file path, then consult the cache.
    String jobid =
        JobHistory.getJobIDFromHistoryFilePath(logFile).toString();
    JobInfo jobInfo = null;
    synchronized(jobHistoryCache) {
      jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        // Cache miss: parse the history file from the file system.
        JobHistoryParser parser = new JobHistoryParser(fs, logFile);
        jobInfo = parser.parse();
        LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
            jobHistoryCache.size());
      }
      // Put the (possibly freshly parsed) JobInfo back into the cache.
      jobHistoryCache.put(jobid, jobInfo);
      int CACHE_SIZE =

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

        validateViewJob(jobTasksJSP + "&taskType=" + taskType + "&status="
            + state, "GET");
      }
    }

    // Parse the history file and then check access to each task's history page.
    JobHistoryParser parser =
        new JobHistoryParser(new Path(historyFileName).getFileSystem(conf),
            historyFileName);
    JobInfo jobInfo = parser.parse();
    Map<TaskID, TaskInfo> tipsMap = jobInfo.getAllTasks();
    for (TaskID tip : tipsMap.keySet()) {
      // validate access of taskdetailshistory.jsp
      validateViewJob(jtURL + "/taskdetailshistory.jsp?logFile="
          + historyFileName + "&tipid=" + tip.toString(), "GET");

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    jobConf.addResource(jobConfFileUrl);
   
    /*
     * Read JobHistoryFile and build job counters to evaluate diagnostic rules
     */
    JobHistoryParser parser;
    JobInfo jobInfo;
    if (jobHistoryFileUrl.getProtocol().equals("hdfs")) {
      parser = new JobHistoryParser(FileSystem.get(jobConf),
                                    jobHistoryFileUrl.getPath());
      jobInfo = parser.parse();
    } else if (jobHistoryFileUrl.getProtocol().equals("file")) {
      parser = new JobHistoryParser(FileSystem.getLocal(jobConf),
                                    jobHistoryFileUrl.getPath());
      jobInfo = parser.parse();
    } else {
      throw new Exception("Malformed URL. Protocol: "+
          jobHistoryFileUrl.getProtocol());
    }
    return jobInfo;
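
Where the scheme of jobHistoryFileUrl is not fixed to "hdfs" or "file", the same lookup can be written by letting FileSystem resolve the scheme itself. This is a hedged alternative sketch, not part of the snippet above: it assumes the URL converts cleanly to a java.net.URI, and unsupported schemes would then fail inside FileSystem.get() instead of with the explicit "Malformed URL" exception.

    // Alternative to the explicit "hdfs"/"file" branching above: let
    // FileSystem.get(URI, Configuration) pick the implementation by scheme.
    // URL.toURI() can throw URISyntaxException, which the enclosing method's
    // broad "throws Exception" would cover here.
    FileSystem fs = FileSystem.get(jobHistoryFileUrl.toURI(), jobConf);
    JobHistoryParser parser =
        new JobHistoryParser(fs, jobHistoryFileUrl.getPath());
    JobInfo jobInfo = parser.parse();
    return jobInfo;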

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();

    // validate format of job level key, values
    validateJobLevelKeyValuesFormat(jobInfo, status);

    // validate format of task level key, values

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
   
    JobHistoryParser.JobInfo jobInfo = parser.parse();
    // Now the history file contents are available in jobInfo. Let us compare
    // them with the actual values from JT.
    validateJobLevelKeyValues(mr, job, jobInfo, conf);
    validateTaskLevelKeyValues(mr, job, jobInfo);
    validateTaskAttemptLevelKeyValues(mr, job, jobInfo);

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    // check history file permission
    assertTrue("History file permissions does not match",
    fileSys.getFileStatus(logFile).getPermission().equals(
       new FsPermission(JobHistory.HISTORY_FILE_PERMISSION)));
   
    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();
   

    assertTrue("Job Status read from job history file is not the expected" +
         " status", status.equals(jobInfo.getJobStatus()));
  }

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

  }

  private void parsePreviousJobHistory() throws IOException {
    // Open the job history stream written by the previous application attempt.
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(),
        appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
      LOG.info("Got an error parsing job-history file" +
          ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
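
The stream-based constructor used in this snippet is convenient when the caller already holds an open handle rather than a path. Below is a minimal sketch of that variant wrapped as a helper; the helper name and the choice to close the stream here are assumptions for illustration, not part of the snippet.

    // Hypothetical helper: parse job history from an already-open stream and,
    // like the snippet above, keep whatever events were read successfully.
    static JobHistoryParser.JobInfo parseFromStream(FSDataInputStream in)
        throws IOException {
      try {
        JobHistoryParser parser = new JobHistoryParser(in);
        JobHistoryParser.JobInfo jobInfo = parser.parse();
        if (parser.getParseException() != null) {
          System.err.println("Ignoring incomplete events: "
              + parser.getParseException());
        }
        return jobInfo;
      } finally {
        in.close(); // assumption: this helper owns and closes the stream
      }
    }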

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

    if (this.jobInfo != null) {
      return;
    }
   
    if (historyFileAbsolute != null) {
      JobHistoryParser parser = null;
      try {
        parser =
            new JobHistoryParser(historyFileAbsolute.getFileSystem(conf),
                historyFileAbsolute);
        this.jobInfo = parser.parse();
      } catch (IOException e) {
        throw new YarnRuntimeException("Could not load history file "
            + historyFileAbsolute, e);
      }
      IOException parseException = parser.getParseException();
      if (parseException != null) {
        throw new YarnRuntimeException(
            "Could not parse history file " + historyFileAbsolute,
            parseException);
      }
