Package org.apache.hadoop.mapreduce.jobhistory

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser$TaskInfo
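Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share: open a job history (.jhist) file, parse it into JobHistoryParser.JobInfo, check getParseException() for any error that parse() tolerated, and walk the resulting TaskID-to-TaskInfo map. The class name JobHistoryDump and the command-line argument are illustrative only and are not taken from the examples below.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

public class JobHistoryDump {
  public static void main(String[] args) throws IOException {
    // args[0]: path to a .jhist file on HDFS or the local file system.
    Path historyFile = new Path(args[0]);
    Configuration conf = new Configuration();
    FileSystem fs = historyFile.getFileSystem(conf);

    JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
    JobInfo jobInfo = parser.parse();

    // parse() returns whatever it managed to read from a truncated or corrupt
    // file; the error it hit is only surfaced through getParseException().
    IOException parseException = parser.getParseException();
    if (parseException != null) {
      System.err.println("History file parsed only partially: " + parseException);
    }

    // TaskInfo holds the per-task data recorded in the history file.
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (TaskInfo taskInfo : allTasks.values()) {
      System.out.println(taskInfo.getTaskId() + "\t" + taskInfo.getTaskType()
          + "\t" + taskInfo.getTaskStatus());
    }
  }
}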


    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    final EventReader realReader = new EventReader(in);
    EventReader reader = Mockito.mock(EventReader.class);
    if (numMaps == numSuccessfulMaps) {
      reader = realReader;
    } else {
      final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack!
      Mockito.when(reader.getNextEvent()).thenAnswer(
          new Answer<HistoryEvent>() {
            public HistoryEvent answer(InvocationOnMock invocation)
                throws IOException {
              HistoryEvent event = realReader.getNextEvent();
              if (event instanceof TaskFinishedEvent) {
                numFinishedEvents.incrementAndGet();
              }
             
              if (numFinishedEvents.get() <= numSuccessfulMaps) {
                return event;
              } else {
                throw new IOException("test");
              }
            }
          }
        );
    }
   
    JobInfo jobInfo = parser.parse(reader);
   
    long numFinishedMaps =
        computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
   
    if (numFinishedMaps != numMaps) {
      Exception parseException = parser.getParseException();
      Assert.assertNotNull("Didn't get expected parse exception",
          parseException);
    }
   
    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"),


    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException,
        parseException);
    int noOffailedAttempts = 0;
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (Task task : job.getTasks().values()) {
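The loop in the previous example is cut off at this point. Below is a hedged sketch of the kind of check it performs, written as a stand-alone helper rather than as the original test body: it counts task attempts that the history file recorded as FAILED, which is what the noOffailedAttempts counter above accumulates. The cross-check against the live Job object is omitted, and the class and method names are illustrative.

import java.util.Map;

import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

class HistoryChecks {
  /** Hypothetical helper: count attempts the history file recorded as FAILED. */
  static int countFailedAttempts(Map<TaskID, TaskInfo> allTasks) {
    int failed = 0;
    for (TaskInfo taskInfo : allTasks.values()) {
      for (TaskAttemptInfo attemptInfo : taskInfo.getAllTaskAttempts().values()) {
        // getTaskStatus() returns the attempt's final state as a string.
        if (TaskStatus.State.FAILED.toString().equals(attemptInfo.getTaskStatus())) {
          failed++;
        }
      }
    }
    return failed;
  }
}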

    if (jobInfo != null) {
      return; //data already loaded
    }
   
    if (historyFileAbsolute != null) {
      JobHistoryParser parser = null;
      try {
        parser =
            new JobHistoryParser(historyFileAbsolute.getFileSystem(conf),
                historyFileAbsolute);
        jobInfo = parser.parse();
      } catch (IOException e) {
        throw new YarnException("Could not load history file "
            + historyFileAbsolute, e);
      }
      IOException parseException = parser.getParseException();
      if (parseException != null) {
        throw new YarnException(
            "Could not parse history file " + historyFileAbsolute,
            parseException);
      }

    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();

    // validate format of job level key, values
    validateJobLevelKeyValuesFormat(jobInfo, status);

    // validate format of task level key, values

    FileSystem fileSys = logFile.getFileSystem(conf);
    // Check if the history file exists
    assertTrue("History file does not exist", fileSys.exists(logFile));

    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
   
    JobHistoryParser.JobInfo jobInfo = parser.parse();
    // Now the history file contents are available in jobInfo. Let us compare
    // them with the actual values from JT.
    validateJobLevelKeyValues(mr, job, jobInfo, conf);
    validateTaskLevelKeyValues(mr, job, jobInfo);
    validateTaskAttemptLevelKeyValues(mr, job, jobInfo);

    // check history file permission
    assertTrue("History file permissions does not match",
        fileSys.getFileStatus(logFile).getPermission().equals(
            new FsPermission(JobHistory.HISTORY_FILE_PERMISSION)));
   
    JobHistoryParser parser = new JobHistoryParser(fileSys,
        logFile.toUri().getPath());
    JobHistoryParser.JobInfo jobInfo = parser.parse();
   

    assertTrue("Job Status read from job history file is not the expected" +
         " status", status.equals(jobInfo.getJobStatus()));
  }

  }

  private void parsePreviousJobHistory() throws IOException {
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(),
        appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
      LOG.info("Got an error parsing job-history file" +
          ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
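The recovery snippet above is truncated at the assignment of the TaskInfo map (presumably jobInfo.getAllTasks(), as in the other examples). As a hedged illustration of how such a map can be consumed, the helper below picks out the tasks whose final status in the history file is SUCCEEDED, which is the information a restarted application master needs in order to skip work that already finished. The class and method names are illustrative, not part of the MapReduce API.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

class RecoveryHelper {
  /** Hypothetical helper: list the tasks a previous attempt already completed. */
  static List<TaskID> completedTaskIds(Map<TaskID, TaskInfo> taskInfos) {
    List<TaskID> completed = new ArrayList<TaskID>();
    for (Map.Entry<TaskID, TaskInfo> entry : taskInfos.entrySet()) {
      // getTaskStatus() holds the final task state as a string, e.g. "SUCCEEDED".
      if ("SUCCEEDED".equals(entry.getValue().getTaskStatus())) {
        completed.add(entry.getKey());
      }
    }
    return completed;
  }
}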

      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      final EventReader realReader = new EventReader(in);
      EventReader reader = Mockito.mock(EventReader.class);
      if (numMaps == numSuccessfulMaps) {
        reader = realReader;
      } else {
        final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack!
        Mockito.when(reader.getNextEvent()).thenAnswer(
            new Answer<HistoryEvent>() {
              public HistoryEvent answer(InvocationOnMock invocation)
                  throws IOException {
                HistoryEvent event = realReader.getNextEvent();
                if (event instanceof TaskFinishedEvent) {
                  numFinishedEvents.incrementAndGet();
                }

                if (numFinishedEvents.get() <= numSuccessfulMaps) {
                  return event;
                } else {
                  throw new IOException("test");
                }
              }
            });
      }

      jobInfo = parser.parse(reader);

      numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);

      if (numFinishedMaps != numMaps) {
        Exception parseException = parser.getParseException();
        Assert.assertNotNull("Didn't get expected parse exception",
            parseException);
      }
    }

      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      JobInfo jobInfo = parser.parse();
      Exception parseException = parser.getParseException();
      Assert.assertNull("Caught an unexpected exception " + parseException,
          parseException);
      int noOffailedAttempts = 0;
      Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
      for (Task task : job.getTasks().values()) {

      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      JobInfo jobInfo = parser.parse();
      Exception parseException = parser.getParseException();
      Assert.assertNull("Caught an unexpected exception " + parseException,
          parseException);
      for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
        CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());
