Package org.apache.hadoop.mapreduce.jobhistory

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser$TaskInfo
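The snippets below all follow the same basic pattern: construct a JobHistoryParser over a job history file, call parse() to obtain a JobInfo, then read job-level fields and the per-task TaskInfo map. A minimal, self-contained sketch of that flow (the history-file path and default Configuration are placeholders, not values from any snippet below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class ParseHistorySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder path; point this at a real job history file.
    Path historyFile = new Path("/tmp/job_201812300000_0001_history.jhist");
    FileSystem fs = historyFile.getFileSystem(conf);

    JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
    JobInfo jobInfo = parser.parse();

    System.out.println("user=" + jobInfo.getUsername()
        + " job=" + jobInfo.getJobname()
        + " queue=" + jobInfo.getJobQueueName()
        + " status=" + jobInfo.getJobStatus()
        + " tasks=" + jobInfo.getAllTasks().size());
  }
}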


        JobHistory.getJobIDFromHistoryFilePath(logFile).toString();
    JobInfo jobInfo = null;
    synchronized(jobHistoryCache) {
      jobInfo = jobHistoryCache.remove(jobid);
      if (jobInfo == null) {
        JobHistoryParser parser = new JobHistoryParser(fs, logFile);
        jobInfo = parser.parse();
        LOG.info("Loading Job History file "+jobid + ".   Cache size is " +
            jobHistoryCache.size());
      }
      jobHistoryCache.put(jobid, jobInfo);
      int CACHE_SIZE =
View Full Code Here
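The snippet above lazily parses a history file and caches the resulting JobInfo keyed by job id. A sketch of the same parse-then-cache pattern using a plain LinkedHashMap as an LRU cache; the CACHE_SIZE value and field names here are illustrative, not the truncated original's:

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

class JobInfoCacheSketch {
  private static final int CACHE_SIZE = 5;   // illustrative limit

  // Access-ordered LRU map: evicts the least recently used JobInfo once
  // CACHE_SIZE is exceeded.
  private final Map<String, JobInfo> jobHistoryCache =
      new LinkedHashMap<String, JobInfo>(CACHE_SIZE, 0.75f, true) {
        @Override
        protected boolean removeEldestEntry(Map.Entry<String, JobInfo> eldest) {
          return size() > CACHE_SIZE;
        }
      };

  synchronized JobInfo getJobInfo(FileSystem fs, String logFile, String jobid)
      throws IOException {
    JobInfo jobInfo = jobHistoryCache.get(jobid);
    if (jobInfo == null) {
      // Cache miss: parse the history file once and remember the result.
      jobInfo = new JobHistoryParser(fs, logFile).parse();
      jobHistoryCache.put(jobid, jobInfo);
    }
    return jobInfo;
  }
}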


    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();

    // validate format of job level key, values
    validateJobLevelKeyValuesFormat(jobInfo, status);

    // validate format of task level key, values
View Full Code Here

    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
   
    JobHistoryParser.JobInfo jobInfo = parser.parse();
    // Now the history file contents are available in jobInfo. Let us compare
    // them with the actual values from JT.
    validateJobLevelKeyValues(mr, job, jobInfo, conf);
    validateTaskLevelKeyValues(mr, job, jobInfo);
    validateTaskAttemptLevelKeyValues(mr, job, jobInfo);
View Full Code Here

    // check history file permission
    assertTrue("History file permissions does not match",
    fileSys.getFileStatus(logFile).getPermission().equals(
       new FsPermission(JobHistory.HISTORY_FILE_PERMISSION)));
   
    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();
   

    assertTrue("Job Status read from job history file is not the expected" +
         " status", status.equals(jobInfo.getJobStatus()));
  }
View Full Code Here
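Outside a JUnit test, the same job-level check boils down to comparing getJobStatus() against the expected terminal state. A condensed sketch, assuming the status is recorded as the String "SUCCEEDED" (other values being e.g. "FAILED" or "KILLED"):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

class JobStatusCheckSketch {
  // Returns true if the history file records a successful job.
  static boolean jobSucceeded(Configuration conf, Path logFile) throws IOException {
    FileSystem fileSys = logFile.getFileSystem(conf);
    JobHistoryParser parser =
        new JobHistoryParser(fileSys, logFile.toUri().getPath());
    JobInfo jobInfo = parser.parse();
    // getJobStatus() returns the terminal state recorded in the history file.
    return "SUCCEEDED".equals(jobInfo.getJobStatus());
  }
}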

    jobConf.addResource(jobConfFileUrl);
   
    /*
     * Read JobHistoryFile and build job counters to evaluate diagnostic rules
     */
    JobHistoryParser parser;
    JobInfo jobInfo;
    if (jobHistoryFileUrl.getProtocol().equals("hdfs")) {
      parser = new JobHistoryParser(FileSystem.get(jobConf),
                                    jobHistoryFileUrl.getPath());
      jobInfo = parser.parse();
    } else if (jobHistoryFileUrl.getProtocol().equals("file")) {
      parser = new JobHistoryParser(FileSystem.getLocal(jobConf),
                                    jobHistoryFileUrl.getPath());
      jobInfo = parser.parse();
    } else {
      throw new Exception("Malformed URL. Protocol: "+
          jobHistoryFileUrl.getProtocol());
    }
    return jobInfo;
View Full Code Here
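The hdfs/file branching above can usually be collapsed by letting FileSystem resolve the implementation from the URI scheme itself. A sketch of that alternative, assuming jobHistoryFileUrl converts cleanly to a URI:

import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

class HistoryFromUrlSketch {
  static JobInfo parseHistory(Configuration jobConf, URL jobHistoryFileUrl)
      throws Exception {
    // FileSystem.get(URI, Configuration) picks the implementation from the
    // URI scheme (hdfs://, file://, ...), so no per-protocol branching is needed.
    FileSystem fs = FileSystem.get(jobHistoryFileUrl.toURI(), jobConf);
    JobHistoryParser parser =
        new JobHistoryParser(fs, jobHistoryFileUrl.getPath());
    return parser.parse();
  }
}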

    Path historyFilePath = new Path (historyDir.toString(),
      historyFileName);

    System.out.println("History File is " + historyFilePath.toString());

    JobHistoryParser parser =
      new JobHistoryParser(fs, historyFilePath);

    JobHistoryParser.JobInfo jobInfo = parser.parse();

    assertTrue (jobInfo.getUsername().equals(username));
    assertTrue(jobInfo.getJobname().equals(weirdJob));
    assertTrue(jobInfo.getJobQueueName().equals(weirdJobQueueName));
    assertTrue(jobInfo.getJobConfPath().equals(weirdPath));
View Full Code Here

        validateViewJob(jobTasksJSP + "&taskType=" + taskType + "&status="
            + state, "GET");
      }
    }

    JobHistoryParser parser =
        new JobHistoryParser(new Path(historyFileName).getFileSystem(conf),
            historyFileName);
    JobInfo jobInfo = parser.parse();
    Map<TaskID, TaskInfo> tipsMap = jobInfo.getAllTasks();
    for (TaskID tip : tipsMap.keySet()) {
      // validate access of taskdetailshistory.jsp
      validateViewJob(jtURL + "/taskdetailshistory.jsp?logFile="
          + historyFileName + "&tipid=" + tip.toString(), "GET");
View Full Code Here
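TaskInfo is the per-task view returned by JobInfo.getAllTasks(), and each TaskInfo carries its attempts keyed by TaskAttemptID. A sketch of walking both levels; the printed fields are an assumption about what a caller typically wants, not something the snippet above does:

import java.util.Map;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

class TaskWalkSketch {
  static void dumpTasks(JobInfo jobInfo) {
    for (Map.Entry<TaskID, TaskInfo> e : jobInfo.getAllTasks().entrySet()) {
      TaskInfo task = e.getValue();
      System.out.println(e.getKey() + " type=" + task.getTaskType()
          + " status=" + task.getTaskStatus());
      // Each TaskInfo also exposes its attempts.
      for (Map.Entry<TaskAttemptID, TaskAttemptInfo> a :
          task.getAllTaskAttempts().entrySet()) {
        System.out.println("  attempt " + a.getKey()
            + " on " + a.getValue().getHostname());
      }
    }
  }
}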

      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      final EventReader realReader = new EventReader(in);
      EventReader reader = Mockito.mock(EventReader.class);
      if (numMaps == numSuccessfulMaps) {
        reader = realReader;
      } else {
        final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack!
        Mockito.when(reader.getNextEvent()).thenAnswer(
            new Answer<HistoryEvent>() {
              public HistoryEvent answer(InvocationOnMock invocation)
              throws IOException {
                HistoryEvent event = realReader.getNextEvent();
                if (event instanceof TaskFinishedEvent) {
                  numFinishedEvents.incrementAndGet();
                }

                if (numFinishedEvents.get() <= numSuccessfulMaps) {
                  return event;
                } else {
                  throw new IOException("test");
                }
              }
            }
        );
      }

      jobInfo = parser.parse(reader);

      numFinishedMaps =
        computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);

      if (numFinishedMaps != numMaps) {
        Exception parseException = parser.getParseException();
        Assert.assertNotNull("Didn't get expected parse exception",
            parseException);
      }
    }
   
View Full Code Here
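When parsing from an already-open stream, the parser records a mid-stream failure instead of rethrowing it, which is what the assertion on getParseException() above relies on. A sketch of the stream-based constructor plus that check (the path argument is a placeholder):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

class StreamParseSketch {
  static JobInfo parseFromStream(Configuration conf, Path historyFilePath)
      throws Exception {
    FileSystem fs = historyFilePath.getFileSystem(conf);
    FSDataInputStream in = fs.open(historyFilePath);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    // parse() returns whatever it could read; a truncated or partly written
    // file shows up here rather than as a thrown exception.
    if (parser.getParseException() != null) {
      System.err.println("History file only partially parsed: "
          + parser.getParseException());
    }
    return jobInfo;
  }
}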

    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an expected exception " + parseException,
        parseException);
    int noOfFailedAttempts = 0;
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (Task task : job.getTasks().values()) {
View Full Code Here
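A sketch of the failed-attempt count the test above builds up, written against JobInfo alone; the "FAILED" status string is an assumption about how attempt states are recorded in the history file:

import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

class FailedAttemptCountSketch {
  static int countFailedAttempts(JobInfo jobInfo) {
    int noOfFailedAttempts = 0;
    for (TaskInfo task : jobInfo.getAllTasks().values()) {
      for (TaskAttemptInfo attempt : task.getAllTaskAttempts().values()) {
        // Assumed status string; attempt states are recorded as Strings
        // such as "SUCCEEDED", "FAILED", "KILLED".
        if ("FAILED".equals(attempt.getTaskStatus())) {
          noOfFailedAttempts++;
        }
      }
    }
    return noOfFailedAttempts;
  }
}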

    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an expected exception " + parseException,
        parseException);
    for (Map.Entry<TaskID,TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
      CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());
View Full Code Here
