Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskType
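
org.apache.hadoop.mapreduce.TaskType is the enum (MAP, REDUCE, JOB_SETUP, JOB_CLEANUP, TASK_CLEANUP) that records which kind of task a TaskID or TaskAttemptID refers to. The snippets below show how the type is read and compared in practice; as a minimal standalone sketch of the pattern most of them share, reading the type off an attempt ID and branching on it (the class name and ID values here are made up for illustration):

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskTypeSketch {
  public static void main(String[] args) {
    // Hypothetical attempt ID: jobtracker id "jt", job 1, map task 0, attempt 0.
    TaskAttemptID attempt = new TaskAttemptID("jt", 1, TaskType.MAP, 0, 0);

    TaskType type = attempt.getTaskType();
    switch (type) {
      case MAP:
        System.out.println(attempt + " is a map attempt");
        break;
      case REDUCE:
        System.out.println(attempt + " is a reduce attempt");
        break;
      default:
        System.out.println(attempt + " is a setup/cleanup attempt");
        break;
    }
  }
}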


  /**
   * @param attemptId
   *          raw {@link TaskAttemptID} read from trace
   * @return masked {@link TaskAttemptID} with empty {@link JobID}.
   */
  private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    JobID jobId = new JobID();
    TaskType taskType = attemptId.getTaskType();
    TaskID taskId = attemptId.getTaskID();
    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), taskType,
        taskId.getId(), attemptId.getId());
  }
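
The masked ID keeps its task type because the type is carried by the task ID, not by the JobID that gets emptied. A small sketch of reading the type back from a parsed attempt ID (the attempt string and class name are made-up examples):

import org.apache.hadoop.mapreduce.TaskAttemptID;

public class TraceIdTypeCheck {
  public static void main(String[] args) {
    // Hypothetical trace attempt ID; the "_m_" segment parses to TaskType.MAP.
    TaskAttemptID parsed =
        TaskAttemptID.forName("attempt_200904211745_0003_m_000004_0");
    System.out.println(parsed.getTaskType());  // prints MAP
  }
}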


   * scaled according to locality in simulation and locality in trace.
   */
  @Override
  public TaskAttemptInfo getMapTaskAttemptInfoAdjusted(int taskNumber,
      int taskAttemptNumber, int locality) {
    TaskType taskType = TaskType.MAP;
    LoggedTask loggedTask = getLoggedTask(taskType, taskNumber);
    if (loggedTask == null) {
      // TODO insert parameters
      TaskInfo taskInfo = new TaskInfo(0, 0, 0, 0, 0);
      return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber,

        JobContext newContext = new JobContextImpl(new Configuration(conf), jobId);
        return newContext;
    }

    static public boolean isMap(TaskAttemptID taskAttemptID) {
        TaskType type = taskAttemptID.getTaskType();
        return type == TaskType.MAP;
    }

  private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tai,
      TaskAttemptState tas) {

    ContainerId ci = mock(ContainerId.class);
    Counters counters = mock(Counters.class);
    TaskType tt = TaskType.MAP;

    long finishTime = System.currentTimeMillis();

    TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class);

      }
    }
  }

  void setFirstTaskLaunchTime(TaskInProgress tip) {
    TaskType key = tip.getFirstTaskType();

    synchronized(firstTaskLaunchTimes) {
      // Could be optimized to do only one lookup with a little more code
      if (!firstTaskLaunchTimes.containsKey(key)) {
        firstTaskLaunchTimes.put(key, tip.getExecStartTime());
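
Because TaskType is an enum, a map keyed by it, like firstTaskLaunchTimes above, fits naturally in a java.util.EnumMap. A minimal sketch of the same record-only-the-first-launch-time guard, with hypothetical class and method names:

import java.util.EnumMap;
import java.util.Map;

import org.apache.hadoop.mapreduce.TaskType;

public class FirstLaunchTimes {
  // Hypothetical holder mirroring the firstTaskLaunchTimes pattern above.
  private final Map<TaskType, Long> firstTaskLaunchTimes =
      new EnumMap<TaskType, Long>(TaskType.class);

  void recordLaunch(TaskType type, long launchTime) {
    synchronized (firstTaskLaunchTimes) {
      // Keep only the first launch time seen for each task type.
      if (!firstTaskLaunchTimes.containsKey(type)) {
        firstTaskLaunchTimes.put(type, launchTime);
      }
    }
  }
}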

            taskContext = new org.apache.hadoop.mapred.TaskAttemptContextImpl(new JobConf(getConfig()),
                TypeConverter.fromYarn(aId));
          }
         
          try {
            TaskType type = taskContext.getTaskAttemptID().getTaskID().getTaskType();
            int numReducers = taskContext.getConfiguration().getInt(MRJobConfig.NUM_REDUCES, 1);
            if(type == TaskType.REDUCE || (type == TaskType.MAP && numReducers <= 0)) {
              committer.recoverTask(taskContext);
              LOG.info("Recovered output from task attempt " + attInfo.getAttemptId());
            } else {

        case SUCCEEDED:
          //recover the task output
          TaskAttemptContext taskContext = new TaskAttemptContextImpl(getConfig(),
              attInfo.getAttemptId());
          try {
            TaskType type = taskContext.getTaskAttemptID().getTaskID().getTaskType();
            int numReducers = taskContext.getConfiguration().getInt(MRJobConfig.NUM_REDUCES, 1);
            if(type == TaskType.REDUCE || (type == TaskType.MAP && numReducers <= 0)) {
              committer.recoverTask(taskContext);
              LOG.info("Recovered output from task attempt " + attInfo.getAttemptId());
            } else {
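
Both recovery snippets above gate on the same condition: output is recovered for reduce attempts, and for map attempts only when the job has no reducers (in a map-only job the maps write the final output). A hypothetical helper expressing just that check:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskType;

public class RecoveryCheck {
  // Hypothetical helper; mirrors the condition used in the snippets above.
  static boolean hasRecoverableOutput(TaskType type, Configuration conf) {
    int numReducers = conf.getInt(MRJobConfig.NUM_REDUCES, 1);
    return type == TaskType.REDUCE
        || (type == TaskType.MAP && numReducers <= 0);
  }
}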

    TaskSplitMetaInfo split =
     taskSplitMetaInfo[taskAttemptID.getTaskID().getId()];
    int locality = getClosestLocality(taskTracker, split);

    TaskID taskId = taskAttemptID.getTaskID();
    TaskType taskType = taskAttemptID.getTaskType();
    if (taskId.getTaskType() != TaskType.MAP) {
      assert false : "Task " + taskId + " is not MAP :" + taskId.getTaskType();
    }
   
    TaskAttemptInfo taskAttemptInfo = jobStory.getMapTaskAttemptInfoAdjusted(

   */
  private TaskAttemptInfo getReduceTaskAttemptInfo(TaskTracker taskTracker,
      TaskAttemptID taskAttemptID) {
    assert (taskAttemptID.getTaskType() == TaskType.REDUCE);
    TaskID taskId = taskAttemptID.getTaskID();
    TaskType taskType = taskAttemptID.getTaskType();

    TaskAttemptInfo taskAttemptInfo = jobStory.getTaskAttemptInfo(taskType,
        taskId.getId(), taskAttemptID.getId());
    if (LOG.isDebugEnabled()) {
      LOG.debug("get an attempt: "
