Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptID
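A TaskAttemptID identifies a single attempt at running a task: it wraps a TaskID (which in turn carries the JobID and the map/reduce type) plus an attempt number, and has the canonical string form attempt_<jtIdentifier>_<jobNum>_<m|r>_<taskNum>_<attemptNum>. A minimal sketch of parsing one (the sample ID string is illustrative, not from a real job):

  import org.apache.hadoop.mapreduce.TaskAttemptID;
  import org.apache.hadoop.mapreduce.TaskID;

  public class TaskAttemptIdDemo {
    public static void main(String[] args) {
      // Parse an attempt ID from its canonical string form.
      TaskAttemptID attempt =
          TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");

      TaskID task = attempt.getTaskID();   // task_200707121733_0003_m_000005
      System.out.println(task);
      System.out.println(attempt.isMap()); // true: the "_m_" component
      System.out.println(attempt.getId()); // 0: the attempt (retry) number
    }
  }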


  public void map(Text key, Text val, Context context)
      throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    // Combine this attempt's hash with random bits so the generated
    // IDs are unique both across task attempts and within one attempt.
    UUID randomUuid = UUID.randomUUID();
    TaskAttemptID taskAttemptId = context.getTaskAttemptID();
    long mostSignificant = taskAttemptId.hashCode();
    long leastSignificant = randomUuid.getLeastSignificantBits();
    UUID uniqueId = new UUID(mostSignificant, leastSignificant);

    String delimiter = conf.get(DELIMITER_CONF);
    String value = val.toString();


    public MockReduceContext(final Configuration configuration,
        final List<Pair<KEYIN, List<VALUEIN>>> in,
        final Counters counters) throws IOException, InterruptedException {
      super(configuration,
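            // Synthetic attempt ID for the mock: jtIdentifier "mrunit-jt",
            // job 0, isMap = false (a reduce context), task 0, attempt 0.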
            new TaskAttemptID("mrunit-jt", 0, false, 0, 0),
            new MockRawKeyValueIterator(), null, null, null,
            new MockOutputCommitter(), new MockReporter(counters), null,
            (Class) Text.class, (Class) Text.class);
      this.inputIter = in.iterator();
      this.output = new MockOutputCollector<KEYOUT, VALUEOUT>();

    public MockMapContext(final Configuration configuration,
        final List<Pair<KEYIN, VALUEIN>> in,
        final Counters counters) throws IOException, InterruptedException {

      super(configuration,
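            // Synthetic attempt ID for the mock: isMap = true marks this
            // as a map-side context.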
            new TaskAttemptID("mrunit-jt", 0, true, 0, 0),
            null, null, new MockOutputCommitter(), new MockReporter(counters), null);
      this.inputIter = in.iterator();
      this.output = new MockOutputCollector<KEYOUT, VALUEOUT>();
    }

   * @return masked {@link TaskAttemptID} with empty {@link JobID}.
   */
  private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    JobID jobId = new JobID();
    TaskID taskId = attemptId.getTaskID();
    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(),
        attemptId.isMap(), taskId.getId(), attemptId.getId());
  }
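
For illustration (the concrete ID below is hypothetical), masking discards the job-specific portion, so attempts from different jobs that share a task type, task number, and attempt number end up equal:

  TaskAttemptID real =
      TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
  TaskAttemptID masked = maskAttemptID(real);
  // masked now has an empty jtIdentifier and job number 0 (roughly
  // "attempt__0000_m_000005_0"), making it a job-independent map key.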

      // Index logged map tasks and their attempts under masked IDs so
      // they can be looked up independently of the original job.
      for (LoggedTask map : job.getMapTasks()) {
        map = sanitizeLoggedTask(map);
        if (map != null) {
          loggedTaskMap.put(maskTaskID(TaskID.forName(map.taskID)), map);

          for (LoggedTaskAttempt mapAttempt : map.getAttempts()) {
            mapAttempt = sanitizeLoggedTaskAttempt(mapAttempt);
            if (mapAttempt != null) {
              TaskAttemptID id = TaskAttemptID.forName(mapAttempt
                  .getAttemptID());
              loggedTaskAttemptMap.put(maskAttemptID(id), mapAttempt);
            }
          }
        }
      }
      for (LoggedTask reduce : job.getReduceTasks()) {
        reduce = sanitizeLoggedTask(reduce);
        if (reduce != null) {
          loggedTaskMap.put(maskTaskID(TaskID.forName(reduce.taskID)), reduce);

          for (LoggedTaskAttempt reduceAttempt : reduce.getAttempts()) {
            reduceAttempt = sanitizeLoggedTaskAttempt(reduceAttempt);
            if (reduceAttempt != null) {
              TaskAttemptID id = TaskAttemptID.forName(reduceAttempt
                  .getAttemptID());
              loggedTaskAttemptMap.put(maskAttemptID(id), reduceAttempt);
            }
          }
        }


  private TaskAttemptID makeTaskAttemptID(TaskType taskType, int taskNumber,
      int taskAttemptNumber) {
    return new TaskAttemptID(new TaskID(JobID.forName(job.getJobID()),
        TaskType.MAP == taskType, taskNumber), taskAttemptNumber);
  }
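
Note that this older API takes a boolean isMap flag rather than a TaskType, which is why the TaskType is converted back to a flag with TaskType.MAP == taskType.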


  private LoggedTaskAttempt getLoggedTaskAttempt(TaskType taskType,
      int taskNumber, int taskAttemptNumber) {
    buildMaps(); // populate the masked-ID lookup tables before the lookup
    TaskAttemptID id =
        new TaskAttemptID(getMaskedTaskID(taskType, taskNumber),
            taskAttemptNumber);
    return loggedTaskAttemptMap.get(id);
  }

  public void testRecordReaderInit() throws InterruptedException, IOException {
    // Test that we properly initialize the child recordreader when
    // CombineFileInputFormat and CombineFileRecordReader are used.

    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf1 = new Configuration();
    conf1.set(DUMMY_KEY, "STATE1");
    TaskAttemptContext context1 = new TaskAttemptContext(conf1, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.

  public void testReinit() throws Exception {
    // Test that a split containing multiple files works correctly,
    // with the child RecordReader getting its initialize() method
    // called a second time.
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf = new Configuration();
    TaskAttemptContext context = new TaskAttemptContext(conf, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
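Note: both tests use the older mapreduce API, in which TaskAttemptContext is a concrete class and TaskAttemptID has a (String, int, boolean, int, int) constructor. In Hadoop 2.x and later, TaskAttemptContext is an interface (use org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl instead) and the boolean map flag is replaced by a TaskType.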

    public static TaskAttemptID newTaskAttemptId(TaskID taskId) {
        if (taskId == null) {
            throw new IllegalArgumentException("taskId must not be null"); //$NON-NLS-1$
        }
        return new TaskAttemptID(taskId, 0);
    }
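
A usage sketch (the task ID string below is illustrative):

    TaskAttemptID firstAttempt =
        newTaskAttemptId(TaskID.forName("task_200707121733_0003_m_000005"));
    // firstAttempt denotes attempt number 0 of the given task, i.e.
    // "attempt_200707121733_0003_m_000005_0".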
