Package org.apache.hadoop.mapred.JobHistory

Examples of org.apache.hadoop.mapred.JobHistory.JobInfo
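
All of the excerpts below follow the same basic pattern: construct a JobHistory.JobInfo keyed by the job ID, hand it to DefaultJobHistoryParser.parseJobTasks() together with the path of the history log and a FileSystem, then read job-level keys and per-task records out of the populated object. A minimal standalone sketch of that pattern (the file path and job ID are placeholders, and the Keys values are assumed to be the standard JobHistory.Keys entries):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.DefaultJobHistoryParser;
import org.apache.hadoop.mapred.JobHistory;

public class JobHistoryParseSketch {
  public static void main(String[] args) throws Exception {
    // Placeholders -- point these at a real job history log and its job ID.
    String historyFile = "/tmp/history/job_200901010000_0001_user_wordcount";
    String jobId = "job_200901010000_0001";

    Configuration conf = new Configuration();
    FileSystem fs = new Path(historyFile).getFileSystem(conf);

    // JobInfo starts out empty; the parser fills in job-level keys and tasks.
    JobHistory.JobInfo jobInfo = new JobHistory.JobInfo(jobId);
    DefaultJobHistoryParser.parseJobTasks(historyFile, jobInfo, fs);

    System.out.println("user   = " + jobInfo.get(JobHistory.Keys.USER));
    System.out.println("status = " + jobInfo.get(JobHistory.Keys.JOB_STATUS));
    System.out.println("tasks  = " + jobInfo.getAllTasks().size());
  }
}

The excerpts that follow show the same calls inside the JobTracker's JSP utilities (JSPUtil), the history-parsing tests, and post-execution performance analysis (JobStatistics).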


  /**
   * Loads the job history file backing <code>logFile</code> into a
   * {@link JobHistory.JobInfo}, caching parsed files in an LRU cache,
   * and checks that <code>user</code> is authorized to view the job.
   *
   * @throws IOException
   */
  static JobInfo getJobInfo(Path logFile, FileSystem fs,
      JobTracker jobTracker, String user) throws IOException {
    String jobid = getJobID(logFile.getName());
    JobInfo jobInfo = null;
    synchronized(jobHistoryCache) {
      jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        jobInfo = new JobHistory.JobInfo(jobid);
        LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
            jobHistoryCache.size());
        DefaultJobHistoryParser.parseJobTasks(logFile.toUri().getPath(),
            jobInfo, fs);
      }
      jobHistoryCache.put(jobid, jobInfo);
      int CACHE_SIZE =
        jobTracker.conf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
      if (jobHistoryCache.size() > CACHE_SIZE) {
        Iterator<Map.Entry<String, JobInfo>> it =
          jobHistoryCache.entrySet().iterator();
        String removeJobId = it.next().getKey();
        it.remove();
        LOG.info("Job History file removed from cache "+removeJobId);
      }
    }

    UserGroupInformation currentUser;
    if (user == null) {
      currentUser = UserGroupInformation.getCurrentUser();
    } else {
      currentUser = UserGroupInformation.createRemoteUser(user);
    }

    // Authorize the user for view access of this job
    jobTracker.getACLsManager().checkAccess(jobid, currentUser,
        jobInfo.getJobQueue(), Operation.VIEW_JOB_DETAILS,
        jobInfo.get(Keys.USER), jobInfo.getJobACLs().get(JobACL.VIEW_JOB));

    return jobInfo;
  }
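
The cache used above is keyed by job ID and is effectively LRU: each hit removes and re-inserts the entry, so the iterator's first entry is the least recently used one, and that entry is evicted once the size exceeds mapred.job.tracker.jobhistory.lru.cache.size (default 5, as read above). If the history pages are consulted heavily, the limit can be raised in the JobTracker's configuration; a small sketch of setting and reading the property (the value 25 is just an example):

import org.apache.hadoop.mapred.JobConf;

public class HistoryCacheSizeSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Raise the job-history LRU cache from the default of 5 entries to 25.
    conf.setInt("mapred.job.tracker.jobhistory.lru.cache.size", 25);
    System.out.println(conf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5));
  }
}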


      HttpServletResponse response, final JobTracker jobTracker,
      final FileSystem fs, final Path logFile) throws IOException,
      InterruptedException, ServletException {
    String jobid = getJobID(logFile.getName());
    String user = request.getRemoteUser();
    JobInfo job = null;
    if (user != null) {
      try {
        job = JSPUtil.getJobInfo(logFile, fs, jobTracker, user);
      } catch (AccessControlException e) {
        String errMsg =

  public static JobInfo getJobInfo(HttpServletRequest request, FileSystem fs)
      throws IOException {
    String jobid = request.getParameter("jobid");
    String logFile = request.getParameter("logFile");
    synchronized(jobHistoryCache) {
      JobInfo jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        jobInfo = new JobHistory.JobInfo(jobid);
        LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
            jobHistoryCache.size());
        DefaultJobHistoryParser.parseJobTasks(logFile, jobInfo, fs);

   
    FSDataOutputStream out = fs.create(historyPath);
    writeHistoryFile(out, true);
    out.close();
   
    JobInfo job = new JobHistory.JobInfo(JOB);
    DefaultJobHistoryParser.parseJobTasks(historyPath.toString(), job, fs);
   
    assertTrue("Failed to parse old jobhistory files",
               job.getAllTasks().size() > 0);
  }

   
    FSDataOutputStream out = fs.create(historyPath);
    writeHistoryFile(out, false);
    out.close();
   
    JobInfo job = new JobHistory.JobInfo(JOB);
    DefaultJobHistoryParser.parseJobTasks(historyPath.toString(), job, fs);
   
    assertTrue("Failed to parse old jobhistory files",
               job.getAllTasks().size() > 0);
  }
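
The tests above only assert that getAllTasks() returns a non-empty map; once a history file has been parsed, the same map can be walked for per-task detail. A sketch, assuming a JobInfo already populated as in the snippets above (the Task and Keys accessors used here are assumed to match the old mapred JobHistory API):

import java.util.Map;
import org.apache.hadoop.mapred.JobHistory;

public class TaskDumpSketch {
  // Print a one-line summary for every task recorded in a parsed JobInfo.
  static void dumpTasks(JobHistory.JobInfo job) {
    for (Map.Entry<String, JobHistory.Task> e : job.getAllTasks().entrySet()) {
      JobHistory.Task task = e.getValue();
      System.out.println(e.getKey()
          + " type=" + task.get(JobHistory.Keys.TASK_TYPE)
          + " status=" + task.get(JobHistory.Keys.TASK_STATUS)
          + " attempts=" + task.getTaskAttempts().size());
    }
  }
}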

   
    /*
     * Read the job information necessary for post performance analysis
     */
    JobConf jobConf = new JobConf();
    JobInfo jobInfo = new JobInfo("");
    readJobInformation(jobConf, jobInfo);
    this._jobExecutionStatistics = new JobStatistics(jobConf, jobInfo);
  }
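
This excerpt comes from post-execution performance analysis: a JobConf and a JobInfo are reloaded from the finished job's artifacts and handed to JobStatistics. The readJobInformation() call itself is not shown on this page; one plausible shape for it, assuming the job's conf XML and history log paths are supplied by the caller (the helper and its parameters are illustrative, not the original implementation):

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.DefaultJobHistoryParser;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobHistory;

public class JobInformationReaderSketch {
  // Hypothetical helper: populate jobConf from the job's conf XML and
  // jobInfo from its history log. Both paths are placeholders.
  static void readJobInformation(JobConf jobConf, JobHistory.JobInfo jobInfo,
      Path jobConfXml, Path jobHistoryLog) throws IOException {
    // Overlay the per-job configuration on top of the defaults.
    jobConf.addResource(jobConfXml);

    // Parse the history log into the (initially empty) JobInfo.
    FileSystem fs = jobHistoryLog.getFileSystem(jobConf);
    DefaultJobHistoryParser.parseJobTasks(jobHistoryLog.toUri().getPath(),
        jobInfo, fs);
  }
}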

   
    FSDataOutputStream out = fs.create(historyPath);
    writeHistoryFile(out, true);
    out.close();
   
    JobInfo job = new JobHistory.JobInfo(JOB);
    DefaultJobHistoryParser.parseJobTasks(historyPath.toString(), job, fs);
   
    assertTrue("Failed to parse old jobhistory files",
               job.getAllTasks().size() > 0);
   
    // cleanup
    fs.delete(TEST_DIR, true);
  }

   
    FSDataOutputStream out = fs.create(historyPath);
    writeHistoryFile(out, false);
    out.close();
   
    JobInfo job = new JobHistory.JobInfo(JOB);
    DefaultJobHistoryParser.parseJobTasks(historyPath.toString(), job, fs);
   
    assertTrue("Failed to parse old jobhistory files",
               job.getAllTasks().size() > 0);
   
    // cleanup
    fs.delete(TEST_DIR, true);
  }

  /**
   * Same as the variant above, but takes an explicit {@link JobConf} and
   * {@link ACLsManager} instead of a live JobTracker for sizing the cache
   * and authorizing the user.
   *
   * @throws IOException
   */
  static JobInfo getJobInfo(Path logFile, FileSystem fs,
      JobConf jobConf, ACLsManager acLsManager, String user) throws IOException {
    String jobid = getJobID(logFile.getName());
    JobInfo jobInfo = null;
    synchronized(jobHistoryCache) {
      jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        jobInfo = new JobHistory.JobInfo(jobid);
        LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
            jobHistoryCache.size());
        DefaultJobHistoryParser.parseJobTasks(logFile.toUri().getPath(),
            jobInfo, fs);
      }
      jobHistoryCache.put(jobid, jobInfo);
      int CACHE_SIZE =
        jobConf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
      if (jobHistoryCache.size() > CACHE_SIZE) {
        Iterator<Map.Entry<String, JobInfo>> it =
          jobHistoryCache.entrySet().iterator();
        String removeJobId = it.next().getKey();
        it.remove();
        LOG.info("Job History file removed from cache "+removeJobId);
      }
    }

    UserGroupInformation currentUser;
    if (user == null) {
      currentUser = UserGroupInformation.getCurrentUser();
    } else {
      currentUser = UserGroupInformation.createRemoteUser(user);
    }

    // Authorize the user for view access of this job
    acLsManager.checkAccess(jobid, currentUser,
        jobInfo.getJobQueue(), Operation.VIEW_JOB_DETAILS,
        jobInfo.get(Keys.USER), jobInfo.getJobACLs().get(JobACL.VIEW_JOB));

    return jobInfo;
  }

      final ACLsManager acLsManager, final FileSystem fs,
      final Path logFile) throws IOException,
      InterruptedException, ServletException {
    String jobid = getJobID(logFile.getName());
    String user = request.getRemoteUser();
    JobInfo job = null;
    if (user != null) {
      try {
        job = JSPUtil.getJobInfo(logFile, fs, jobConf, acLsManager, user);
      } catch (AccessControlException e) {
        String trackerAddress = jobConf.get("mapred.job.tracker.http.address");
