Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptID
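A TaskAttemptID names one attempt of a single map or reduce task: it wraps a TaskID (itself a JobID plus a TaskType and task number) and adds an attempt number. A minimal sketch of constructing, printing, and parsing one is below; the jobtracker identifier and the numbers are arbitrary placeholder values.

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskAttemptIDSketch {
  public static void main(String[] args) {
    // Build an attempt id piece by piece: job -> task -> attempt.
    JobID jobId = new JobID("200707121733", 3);
    TaskID taskId = new TaskID(jobId, TaskType.MAP, 5);
    TaskAttemptID attemptId = new TaskAttemptID(taskId, 0);

    // Prints something like "attempt_200707121733_0003_m_000005_0".
    System.out.println(attemptId);

    // The string form round-trips through forName().
    TaskAttemptID parsed = TaskAttemptID.forName(attemptId.toString());
    System.out.println(parsed.getTaskID());   // task_200707121733_0003_m_000005
    System.out.println(parsed.getJobID());    // job_200707121733_0003
  }
}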


  // MRUnit's MockReduceContext: the TaskAttemptID passed to the superclass
  // constructor is a synthetic id ("mrunit-jt", job 0, reduce task 0, attempt 0)
  // used only to satisfy the ReduceContext API.
  public MockReduceContext(Configuration configuration,
                           final List<Pair<KEYIN, List<VALUEIN>>> in,
                           final Counters counters)
  throws IOException, InterruptedException {
    super(configuration,
          new TaskAttemptID("mrunit-jt", 0, TaskType.REDUCE, 0, 0),
          new MockRawKeyValueIterator(), null, null, null,
          new MockOutputCommitter(), new MockReporter(counters), null,
          (Class) Text.class, (Class) Text.class);
    this.inputIter = in.iterator();
    this.output = new MockOutputCollector<KEYOUT, VALUEOUT>();
  }


  /**
   * Masks a TaskAttemptID by replacing its job identity with a blank JobID
   * while preserving the task type, task number, and attempt number.
   */
  private TaskAttemptID maskAttemptID(TaskAttemptID attemptId) {
    JobID jobId = new JobID();
    TaskType taskType = attemptId.getTaskType();
    TaskID taskId = attemptId.getTaskID();
    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), taskType,
        taskId.getId(), attemptId.getId());
  }
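This masking pattern appears in Hadoop's Rumen trace tooling: it keys tasks and attempts by their position within a job rather than by the job itself. A small illustrative call, assuming maskAttemptID is in scope; the attempt-id strings are made-up examples.

    // Two attempts from different jobs collapse to the same masked id,
    // because only the job identity differs; with the default JobID the
    // masked id prints roughly as "attempt__0000_m_000005_0".
    TaskAttemptID a = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
    TaskAttemptID b = TaskAttemptID.forName("attempt_200809101112_0042_m_000005_0");
    assert maskAttemptID(a).equals(maskAttemptID(b));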

      // Index logged map and reduce tasks (and their attempts) by masked ids,
      // so later lookups ignore the original job identity. The excerpt picks
      // up inside the loop over map tasks.
          loggedTaskMap.put(maskTaskID(TaskID.forName(map.taskID)), map);

          for (LoggedTaskAttempt mapAttempt : map.getAttempts()) {
            mapAttempt = sanitizeLoggedTaskAttempt(mapAttempt);
            if (mapAttempt != null) {
              TaskAttemptID id = TaskAttemptID.forName(mapAttempt
                  .getAttemptID());
              loggedTaskAttemptMap.put(maskAttemptID(id), mapAttempt);
            }
          }
        }
      }
      for (LoggedTask reduce : job.getReduceTasks()) {
        reduce = sanitizeLoggedTask(reduce);
        if (reduce != null) {
          loggedTaskMap.put(maskTaskID(TaskID.forName(reduce.taskID)), reduce);

          for (LoggedTaskAttempt reduceAttempt : reduce.getAttempts()) {
            reduceAttempt = sanitizeLoggedTaskAttempt(reduceAttempt);
            if (reduceAttempt != null) {
              TaskAttemptID id = TaskAttemptID.forName(reduceAttempt
                  .getAttemptID());
              loggedTaskAttemptMap.put(maskAttemptID(id), reduceAttempt);
            }
          }
        }

  // Builds a TaskAttemptID for the given task type, task number, and attempt
  // number under this job's id.
  private TaskAttemptID makeTaskAttemptID(TaskType taskType, int taskNumber,
      int taskAttemptNumber) {
    return new TaskAttemptID(new TaskID(JobID.forName(job.getJobID()),
        taskType, taskNumber), taskAttemptNumber);
  }

  // Looks up a logged attempt by reconstructing the masked TaskAttemptID the
  // maps above were keyed with.
  private LoggedTaskAttempt getLoggedTaskAttempt(TaskType taskType,
      int taskNumber, int taskAttemptNumber) {
    buildMaps();
    TaskAttemptID id =
        new TaskAttemptID(getMaskedTaskID(taskType, taskNumber),
            taskAttemptNumber);
    return loggedTaskAttemptMap.get(id);
  }

    // EasyMock test setup: stub a TaskAttemptContext so it returns the job
    // configuration and a fixed attempt id.
    AvroJob.setOutputKeySchema(job, writerSchema);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);
    expect(context.getConfiguration())
        .andReturn(job.getConfiguration()).anyTimes();
    expect(context.getTaskAttemptID())
        .andReturn(new TaskAttemptID("id", 1, true, 1, 1))
        .anyTimes();

    // Create a mock record writer.
    @SuppressWarnings("unchecked")
    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter
        = createMock(RecordWriter.class);
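The new TaskAttemptID("id", 1, true, 1, 1) call above uses the older constructor whose boolean argument marks the attempt as a map task; later Hadoop releases deprecate that form in favor of the TaskType-based constructor. The equivalent call, as a sketch:

    // Same attempt id via the TaskType-based constructor:
    // jtIdentifier "id", job 1, map task 1, attempt 1.
    TaskAttemptID attemptId = new TaskAttemptID("id", 1, TaskType.MAP, 1, 1);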

  private boolean _optimize = true;
  private INDEXING_TYPE _indexingType = INDEXING_TYPE.REBUILD;
  private transient ZooKeeper _zooKeeper;

  public String getShardName(TaskAttemptContext context) {
    TaskAttemptID taskAttemptID = context.getTaskAttemptID();
    // Use the task number (not the attempt number) so that every attempt of
    // the same task maps to the same shard.
    int id = taskAttemptID.getTaskID().getId();
    return BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, id);
  }

    // HBase test setup: a no-arg TaskAttemptID() yields an empty, synthetic
    // attempt id, which is enough for exercising a RecordWriter directly.
    Path dir =
      HBaseTestingUtility.getTestDir("test_LATEST_TIMESTAMP_isReplaced");
    try {
      Job job = new Job(conf);
      FileOutputFormat.setOutputPath(job, dir);
      context = new TaskAttemptContext(job.getConfiguration(),
        new TaskAttemptID());
      HFileOutputFormat hof = new HFileOutputFormat();
      writer = hof.getRecordWriter(context);
      final byte [] b = Bytes.toBytes("b");

      // Test 1.  Pass a KV that has a ts of LATEST_TIMESTAMP.
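A note on versions: new TaskAttemptContext(...) as written compiles only against Hadoop releases where TaskAttemptContext is a concrete class. In Hadoop 2.x it is an interface, and (assuming a 2.x classpath) the usual stand-in is org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl:

    // Hadoop 2.x sketch of the same construction.
    TaskAttemptContext context =
        new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());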

    }
    info.completedTaskAttemptsMap.put(event.getTaskAttemptId(), attemptInfo);
  }

  // Job-history parsing: records the start of a task attempt, keyed by its
  // TaskAttemptID.
  private void handleTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {
    TaskAttemptID attemptId = event.getTaskAttemptId();
    TaskInfo taskInfo = info.tasksMap.get(event.getTaskId());

    TaskAttemptInfo attemptInfo = new TaskAttemptInfo();
    attemptInfo.startTime = event.getStartTime();
    attemptInfo.attemptId = event.getTaskAttemptId();

    counters = cluster.getJob(jobID).getCounters();
    Assert.assertEquals(0, counters.countCounters());
    Job job = cluster.getJob(jobID);
    // Build a TaskAttemptID for attempt 0 of map task 0 of this job.
    org.apache.hadoop.mapreduce.TaskID taskId =
      new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0);
    TaskAttemptID tId = new TaskAttemptID(taskId, 0);

    // Invoke all methods to check that no exception is thrown.
    job.killJob();
    job.killTask(tId);
    job.failTask(tId);
