Class org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo
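Before the excerpts below, a minimal sketch of where a TaskInfo usually comes from: JobHistoryParser parses a completed job's history (.jhist) file into a JobInfo, whose getAllTasks() map holds one TaskInfo per task. The file path argument and the HistoryTaskDump class name are illustrative assumptions, not taken from the examples.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

public class HistoryTaskDump {
  public static void main(String[] args) throws IOException {
    // Illustrative: pass the path to a completed job's .jhist file.
    Path historyFile = new Path(args[0]);
    FileSystem fs = historyFile.getFileSystem(new Configuration());

    // Parse the history file and walk the per-task records.
    JobInfo jobInfo = new JobHistoryParser(fs, historyFile).parse();
    for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
      TaskInfo taskInfo = entry.getValue();
      System.out.println(entry.getKey()
          + " type=" + taskInfo.getTaskType()
          + " status=" + taskInfo.getTaskStatus()
          + " attempts=" + taskInfo.getAllTaskAttempts().size());
    }
  }
}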


      if (tasksLoaded.get()) {
        return;
      }
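      // Convert each parsed TaskInfo into a CompletedTask, keyed by its YARN
      // TaskId, and bucket it by task type.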
      for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
        TaskInfo taskInfo = entry.getValue();
        Task task = new CompletedTask(yarnTaskID, taskInfo);
        tasks.put(yarnTaskID, task);
        if (task.getType() == TaskType.MAP) {
          mapTasks.put(task.getID(), task);
        } else if (task.getType() == TaskType.REDUCE) {
View Full Code Here


  @Test
  public void testTaskStartTimes() {
   
    TaskId taskId = Mockito.mock(TaskId.class);
    TaskInfo taskInfo = Mockito.mock(TaskInfo.class);
    Map<TaskAttemptID, TaskAttemptInfo> taskAttempts
      = new TreeMap<TaskAttemptID, TaskAttemptInfo>();
   
    TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
    TaskAttemptInfo info = Mockito.mock(TaskAttemptInfo.class);
    Mockito.when(info.getAttemptId()).thenReturn(id);
    Mockito.when(info.getStartTime()).thenReturn(10L);
    taskAttempts.put(id, info);
   
    id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
    info = Mockito.mock(TaskAttemptInfo.class);
    Mockito.when(info.getAttemptId()).thenReturn(id);
    Mockito.when(info.getStartTime()).thenReturn(20L);
    taskAttempts.put(id, info);
   
   
    Mockito.when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
    CompletedTask task = new CompletedTask(taskId, taskInfo);
    TaskReport report = task.getReport();

    // Make sure the startTime returned by the report is the lesser of the
    // attempt launch times
View Full Code Here

  }

  protected void scheduleTasks(Set<TaskId> taskIDs,
      boolean recoverTaskOutput) {
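    // Tasks with a recovered TaskInfo are replayed via TaskRecoverEvent;
    // the rest are scheduled from scratch.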
    for (TaskId taskID : taskIDs) {
      TaskInfo taskInfo = completedTasksFromPreviousRun.remove(taskID);
      if (taskInfo != null) {
        eventHandler.handle(new TaskRecoverEvent(taskID, taskInfo,
            committer, recoverTaskOutput));
      } else {
        eventHandler.handle(new TaskEvent(taskID, TaskEventType.T_SCHEDULE));
View Full Code Here

  @Test (timeout=5000)
  public void testTaskStartTimes() {
   
    TaskId taskId = mock(TaskId.class);
    TaskInfo taskInfo = mock(TaskInfo.class);
    Map<TaskAttemptID, TaskAttemptInfo> taskAttempts
      = new TreeMap<TaskAttemptID, TaskAttemptInfo>();
   
    TaskAttemptID id = new TaskAttemptID("0", 0, TaskType.MAP, 0, 0);
    TaskAttemptInfo info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(10L);
    taskAttempts.put(id, info);
   
    id = new TaskAttemptID("1", 0, TaskType.MAP, 1, 1);
    info = mock(TaskAttemptInfo.class);
    when(info.getAttemptId()).thenReturn(id);
    when(info.getStartTime()).thenReturn(20L);
    taskAttempts.put(id, info);
   
   
    when(taskInfo.getAllTaskAttempts()).thenReturn(taskAttempts);
    CompletedTask task = new CompletedTask(taskId, taskInfo);
    TaskReport report = task.getReport();

    // Make sure the startTime returned by the report is the lesser of the
    // attempt launch times
View Full Code Here

      Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
    }

    // Deep compare Job and JobInfo
    for (Task task : job.getTasks().values()) {
      TaskInfo taskInfo = allTasks.get(
          TypeConverter.fromYarn(task.getID()));
      Assert.assertNotNull("TaskInfo not found", taskInfo);
      for (TaskAttempt taskAttempt : task.getAttempts().values()) {
        TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(
            TypeConverter.fromYarn(taskAttempt.getID()));
        Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo);
        Assert.assertEquals("Incorrect shuffle port for task attempt",
            taskAttempt.getShufflePort(), taskAttemptInfo.getShufflePort());
View Full Code Here

      return handler;
    }
  }

  private TaskAttemptInfo getTaskAttemptInfo(TaskAttemptId id) {
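    // Translate the YARN attempt id back to the classic TaskAttemptID to look
    // up the recovered attempt.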
    TaskInfo taskInfo = completedTasks.get(id.getTaskId());
    return taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn(id));
  }
View Full Code Here

        else if (event.getType() == TaskEventType.T_ATTEMPT_FAILED
            || event.getType() == TaskEventType.T_ATTEMPT_KILLED
            || event.getType() == TaskEventType.T_ATTEMPT_SUCCEEDED) {
          TaskTAttemptEvent tEvent = (TaskTAttemptEvent) event;
          LOG.info("Recovered Task attempt " + tEvent.getTaskAttemptID());
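          // Drop the replayed attempt from its recovered TaskInfo.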
          TaskInfo taskInfo = completedTasks.get(tEvent.getTaskAttemptID()
              .getTaskId());
          taskInfo.getAllTaskAttempts().remove(
              TypeConverter.fromYarn(tEvent.getTaskAttemptID()));
          // remove the task info from completed tasks if all attempts are
          // recovered
          if (taskInfo.getAllTaskAttempts().size() == 0) {
            completedTasks.remove(tEvent.getTaskAttemptID().getTaskId());
            // checkForRecoveryComplete
            LOG.info("CompletedTasks() " + completedTasks.size());
            if (completedTasks.size() == 0) {
              recoveryMode = false;
View Full Code Here

   
    if (loadTasks) {
      for (Map.Entry<org.apache.hadoop.mapreduce.TaskID, TaskInfo> entry : jobInfo
          .getAllTasks().entrySet()) {
        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
        TaskInfo taskInfo = entry.getValue();
        Task task = new CompletedTask(yarnTaskID, taskInfo);
        tasks.put(yarnTaskID, task);
        if (task.getType() == TaskType.MAP) {
          mapTasks.put(task.getID(), task);
        } else if (task.getType() == TaskType.REDUCE) {
View Full Code Here

    TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2,
        TaskAttemptState.FAILED);
    mockTaskAttempts.put(taId2, mockTAinfo2);

    OutputCommitter mockCommitter = mock(OutputCommitter.class);
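    // Stub a successful TaskInfo whose recorded attempts drive the recovery
    // path under test.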
    TaskInfo mockTaskInfo = mock(TaskInfo.class);
    when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED");
    when(mockTaskInfo.getTaskId()).thenReturn(taskID);
    when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts);

    recoverMapTask.handle(
        new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true));

    ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
View Full Code Here
