Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskType
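TaskType is the enum MapReduce uses to tag a task as a map, reduce, or housekeeping task. Before the excerpts below, a minimal stand-alone sketch of working with the enum directly, assuming the standard constants MAP, REDUCE, JOB_SETUP, JOB_CLEANUP and TASK_CLEANUP (TaskTypeBasics is an illustrative class name):

import org.apache.hadoop.mapreduce.TaskType;

public class TaskTypeBasics {
  public static void main(String[] args) {
    // Enumerate every task type the framework distinguishes.
    for (TaskType type : TaskType.values()) {
      System.out.println(type.name());
    }

    // Standard enum round-trip from a string, e.g. when reading history logs.
    TaskType parsed = TaskType.valueOf("REDUCE");
    System.out.println(parsed == TaskType.REDUCE); // prints true
  }
}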


            return new JobContextImpl(conf, jobId);
        }
    }

    public static boolean isMap(TaskAttemptID taskAttemptID) {
        // A task attempt is a map attempt exactly when its TaskType is MAP.
        TaskType type = taskAttemptID.getTaskType();
        return type == TaskType.MAP;
    }
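
A short usage sketch for the isMap check above; the TaskAttemptID constructor is the same one the later maskAttemptID excerpt uses, and "jt-sample" plus the numeric ids are made-up values:

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class IsMapExample {
  public static void main(String[] args) {
    TaskAttemptID mapAttempt    = new TaskAttemptID("jt-sample", 1, TaskType.MAP, 3, 0);
    TaskAttemptID reduceAttempt = new TaskAttemptID("jt-sample", 1, TaskType.REDUCE, 3, 0);

    // Same comparison the isMap helper performs.
    System.out.println(mapAttempt.getTaskType() == TaskType.MAP);    // true
    System.out.println(reduceAttempt.getTaskType() == TaskType.MAP); // false
  }
}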


    LOG.info("Choosing data-local task " + tip.getTIPId());
    jobCounters.incrCounter(Counter.DATA_LOCAL_MAPS, 1);
  }

  // Record the launch time of the first task of each TaskType in this job.
  void setFirstTaskLaunchTime(TaskInProgress tip) {
    TaskType key = tip.getFirstTaskType();

    synchronized (firstTaskLaunchTimes) {
      // Could be optimized to do only one lookup with a little more code
      if (!firstTaskLaunchTimes.containsKey(key)) {
        firstTaskLaunchTimes.put(key, tip.getExecStartTime());
      }
    }
  }
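
The method above keys a shared map by TaskType so only the first launch of each kind of task is recorded. A stand-alone sketch of the same idea; FirstLaunchTracker and recordLaunch are illustrative names, and Map.putIfAbsent is the single-lookup variant the snippet's own comment hints at:

import java.util.EnumMap;
import java.util.Map;

import org.apache.hadoop.mapreduce.TaskType;

public class FirstLaunchTracker {
  // EnumMap is a compact map implementation for enum keys.
  private final Map<TaskType, Long> firstTaskLaunchTimes =
      new EnumMap<TaskType, Long>(TaskType.class);

  // Record the launch time only if no task of this type has launched yet.
  void recordLaunch(TaskType type, long launchTimeMillis) {
    synchronized (firstTaskLaunchTimes) {
      firstTaskLaunchTimes.putIfAbsent(type, launchTimeMillis);
    }
  }

  Long firstLaunchTime(TaskType type) {
    synchronized (firstTaskLaunchTimes) {
      return firstTaskLaunchTimes.get(type);
    }
  }
}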


          && (error != null || (status != null && !status
              .equalsIgnoreCase("success")))) {
        Task20LineHistoryEventEmitter that =
            (Task20LineHistoryEventEmitter) thatg;

        TaskType originalTaskType =
            that.originalTaskType == null ? Version20LogInterfaceUtils
                .get20TaskType(taskType) : that.originalTaskType;

        return new TaskFailedEvent(taskID, Long.parseLong(finishTime),
            originalTaskType, error, status, null);
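
The emitter above falls back to Version20LogInterfaceUtils.get20TaskType when it has no recorded task type. A rough stand-in for that kind of string-to-enum mapping; get20TaskTypeOrNull is an illustrative name, and treating "CLEANUP"/"SETUP" as job-level types is an assumption about 0.20-era log values rather than the Rumen implementation:

import org.apache.hadoop.mapreduce.TaskType;

public final class TaskTypeParsing {
  // Map an old-style history string onto TaskType, returning null when unrecognized.
  static TaskType get20TaskTypeOrNull(String raw) {
    if (raw == null) {
      return null;
    }
    switch (raw.trim().toUpperCase()) {
      case "MAP":     return TaskType.MAP;
      case "REDUCE":  return TaskType.REDUCE;
      case "CLEANUP": return TaskType.JOB_CLEANUP; // assumed mapping
      case "SETUP":   return TaskType.JOB_SETUP;   // assumed mapping
      default:        return null;
    }
  }
}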

   * scaled according to locality in simulation and locality in trace.
   */
  @Override
  public TaskAttemptInfo getMapTaskAttemptInfoAdjusted(int taskNumber,
      int taskAttemptNumber, int locality) {
    TaskType taskType = TaskType.MAP;
    LoggedTask loggedTask = getLoggedTask(taskType, taskNumber);
    if (loggedTask == null) {
      // TODO insert parameters
      TaskInfo taskInfo = new TaskInfo(0, 0, 0, 0, 0);
      return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber,
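
The method above pins taskType to TaskType.MAP and falls back to made-up task info when the trace has no record. A stripped-down sketch of that lookup-or-synthesize pattern; MapAttemptRuntimes and its field are hypothetical, not Rumen types:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.mapreduce.TaskType;

public class MapAttemptRuntimes {
  // Hypothetical trace data: expected runtime keyed by "<type>_<taskNumber>".
  private final Map<String, Long> loggedRuntimesMillis = new HashMap<String, Long>();

  // Return the logged runtime for a map task, or a default when the trace
  // has no entry - the equivalent of the "loggedTask == null" branch above.
  long mapTaskRuntime(int taskNumber, long defaultMillis) {
    TaskType taskType = TaskType.MAP; // map attempts are always MAP
    Long logged = loggedRuntimesMillis.get(taskType + "_" + taskNumber);
    return logged != null ? logged : defaultMillis;
  }
}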


   *          raw {@link TaskID} read from trace
   * @return masked {@link TaskID} with empty {@link JobID}.
   */
  private TaskID maskTaskID(TaskID taskId) {
    JobID jobId = new JobID();
    TaskType taskType = taskId.getTaskType();
    return new TaskID(jobId, taskType, taskId.getId());
  }
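
A quick look at what the masking above preserves: the TaskType and the per-job task number survive, while the job portion becomes an empty JobID. The identifiers "jt-sample", 7 and 42 are made-up values:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;

public class MaskTaskIdExample {
  public static void main(String[] args) {
    TaskID original = new TaskID(new JobID("jt-sample", 7), TaskType.REDUCE, 42);
    TaskID masked = new TaskID(new JobID(), original.getTaskType(), original.getId());

    System.out.println(masked.getTaskType()); // REDUCE (type preserved)
    System.out.println(masked.getId());       // 42 (task number preserved)
  }
}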

   *          raw {@link TaskAttemptID} read from trace
   * @return masked {@link TaskAttemptID} with empty {@link JobID}.
   */
  private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    JobID jobId = new JobID();
    TaskType taskType = attemptId.getTaskType();
    TaskID taskId = attemptId.getTaskID();
    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), taskType,
        taskId.getId(), attemptId.getId());
  }
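
The attempt-level variant follows the same pattern; rebuilding the ID keeps the TaskType, task number and attempt number while zeroing out the job. Again the concrete ids are made-up values:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class MaskAttemptIdExample {
  public static void main(String[] args) {
    TaskAttemptID original = new TaskAttemptID("jt-sample", 7, TaskType.MAP, 42, 1);

    JobID emptyJob = new JobID();
    TaskAttemptID masked = new TaskAttemptID(emptyJob.getJtIdentifier(), emptyJob.getId(),
        original.getTaskType(), original.getTaskID().getId(), original.getId());

    System.out.println(masked.getTaskType());       // MAP
    System.out.println(masked.getTaskID().getId()); // 42
    System.out.println(masked.getId());             // 1
  }
}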


            taskContext = new org.apache.hadoop.mapred.TaskAttemptContextImpl(new JobConf(getConfig()),
                TypeConverter.fromYarn(aId));
          }
         
          try {
            TaskType type = taskContext.getTaskAttemptID().getTaskID().getTaskType();
            int numReducers = taskContext.getConfiguration().getInt(MRJobConfig.NUM_REDUCES, 1);
            if (type == TaskType.REDUCE || (type == TaskType.MAP && numReducers <= 0)) {
              committer.recoverTask(taskContext);
              LOG.info("Recovered output from task attempt " + attInfo.getAttemptId());
            } else {
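
The recovery decision above reduces to one predicate on TaskType: reduce output is always worth recovering, while map output only matters when the job has no reducers (a map-only job commits map output directly). A small helper capturing that condition; shouldRecoverOutput is an illustrative name, not an existing MRAppMaster method:

import org.apache.hadoop.mapreduce.TaskType;

public final class RecoveryPolicy {
  // Recover committed output for reduces, and for maps only in map-only jobs,
  // matching the condition in the excerpt above.
  static boolean shouldRecoverOutput(TaskType type, int numReducers) {
    return type == TaskType.REDUCE
        || (type == TaskType.MAP && numReducers <= 0);
  }
}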
