Package org.apache.hadoop.mapreduce.v2.api.records

Examples of org.apache.hadoop.mapreduce.v2.api.records.TaskId
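TaskId identifies a single task within a MapReduce job. It is a YARN-layer record carrying three pieces of state: the owning JobId, a numeric task index, and a TaskType (MAP or REDUCE). The excerpts below come from the Hadoop MapReduce v2 sources and tests and show the record being built, converted, and used as a lookup key.

As a minimal sketch drawn from those excerpts (surrounding imports and the appId value are assumed to be in scope), a TaskId can be obtained either from the MRBuilderUtils helper or from the record factory, and MRApps converts it to and from its string form:

    // Both construction styles appear in the excerpts below.
    JobId jobId = MRBuilderUtils.newJobId(appId, 8);   // appId assumed in scope
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 9, TaskType.MAP);

    // Equivalent construction via the record factory and setters.
    TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jobId);
    tid.setId(9);
    tid.setTaskType(TaskType.MAP);

    // MRApps round-trips a TaskId through its string form.
    String s = MRApps.toString(taskId);
    TaskId parsed = MRApps.toTaskID(s);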



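This test helper walks every task in a Job, renders each TaskId with MRApps.toString, and checks that the JSON array returned by the AM web services contains a matching entry of the requested type: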
  public void verifyAMTask(JSONArray arr, Job job, String type)
      throws JSONException {
    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      boolean found = false;
      if (type != null && task.getType() == MRApps.taskType(type)) {

        for (int i = 0; i < arr.length(); i++) {
        // ... (remainder of method elided)


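A companion helper compares a single task's report fields against expected values, again using the TaskId's string form as the canonical id: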
  public void verifyTaskGeneric(Task task, String id, String state,
      String type, String successfulAttempt, long startTime, long finishTime,
      long elapsedTime, float progress) {

    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();

    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
View Full Code Here

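The XML variant of the same verification iterates DOM elements rather than JSON entries: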
  public void verifyAMTaskXML(NodeList nodes, Job job) {

    assertEquals("incorrect number of elements", 2, nodes.getLength());

    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);
        // ... (remainder of method elided)


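On the protocol side, a TaskId arrives inside a request record; its embedded JobId is used to look up and verify the owning Job before the task's report is returned: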
    @Override
    public GetTaskReportResponse getTaskReport(GetTaskReportRequest request) throws YarnRemoteException {
      TaskId taskId = request.getTaskId();
      Job job = verifyAndGetJob(taskId.getJobId());
      GetTaskReportResponse response = recordFactory.newRecordInstance(GetTaskReportResponse.class);
      response.setTaskReport(job.getTask(taskId).getReport());
      return response;
    }
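A caller-side counterpart is easy to sketch. This is a hypothetical client fragment, assuming the same recordFactory style as above and a protocol reference in scope; the setTaskId setter is inferred from the record's getTaskId accessor shown above:

    // Hypothetical caller: populate the request record, then fetch the report.
    GetTaskReportRequest request =
        recordFactory.newRecordInstance(GetTaskReportRequest.class);
    request.setTaskId(taskId);                 // paired setter (assumed)
    GetTaskReportResponse response = protocol.getTaskReport(request);
    TaskReport report = response.getTaskReport();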

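Here a TaskId is parsed back from its string form with MRApps.toTaskID and used to navigate from Job to Task to a TaskAttempt, whose counters are cleared to exercise missing-counter handling: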
        "org.apache.hadoop.mapreduce.FileSystemCounter");
    params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
   
    // remove counters from one task attempt
    // to test handling of missing counters
    TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
    Job job = appContext.getJob(taskID.getJobId());
    Task task = job.getTask(taskID);
    TaskAttempt attempt = task.getAttempts().values().iterator().next();
    attempt.getReport().setCounters(null);
   
    WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
        appContext, params);

   
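When a completed job's history is loaded, each classic org.apache.hadoop.mapreduce.TaskID is converted to a YARN-layer TaskId, which then serves as the map key: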
    if (loadTasks) {
      for (Map.Entry<org.apache.hadoop.mapreduce.TaskID, TaskInfo> entry : jobInfo
          .getAllTasks().entrySet()) {
        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
        TaskInfo taskInfo = entry.getValue();
        Task task = new CompletedTask(yarnTaskID, taskInfo);
        tasks.put(yarnTaskID, task);
        if (task.getType() == TaskType.MAP) {
          mapTasks.put(task.getID(), task);
          // ... (remainder of loop elided)
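TypeConverter pairs the two id families. A minimal round-trip sketch (the fromYarn direction is the paired converter and is an assumption here, not shown in the excerpt):

    // Classic MapReduce TaskID -> YARN-layer TaskId, as in the loop above.
    TaskId yarnTaskID = TypeConverter.toYarn(classicID);   // classicID assumed
    // ...and back again via the paired converter (assumed).
    org.apache.hadoop.mapreduce.TaskID back = TypeConverter.fromYarn(yarnTaskID);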

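Test setup code typically builds the full id chain, with TaskId in the middle: ApplicationId, ApplicationAttemptId, JobId, TaskId, TaskAttemptId, ContainerId: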
    ApplicationId appId = BuilderUtils.newApplicationId(12345, 67);
    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
      appId, 3);
    JobId jobId = MRBuilderUtils.newJobId(appId, 8);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 9, TaskType.MAP);
    TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
    ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 10);

    AppContext context = mock(AppContext.class);
    CustomContainerLauncher containerLauncher = new CustomContainerLauncher(
        // ... (constructor arguments and remainder elided)

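The pool-limit test begins with the same id chain: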
  public void testPoolLimits() throws InterruptedException {
    ApplicationId appId = BuilderUtils.newApplicationId(12345, 67);
    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
      appId, 3);
    JobId jobId = MRBuilderUtils.newJobId(appId, 8);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 9, TaskType.MAP);
    TaskAttemptId taskAttemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
    ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 10);

    AppContext context = mock(AppContext.class);
    CustomContainerLauncher containerLauncher = new CustomContainerLauncher(
        // ... (constructor arguments and remainder elided)


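Mock factories build the TaskId directly through the record factory and hand it to an anonymous Task implementation that echoes it back from getID: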
  public static Task newTask(JobId jid, int i, int m) {
    final TaskId tid = Records.newRecord(TaskId.class);
    tid.setJobId(jid);
    tid.setId(i);
    tid.setTaskType(TASK_TYPES.next());
    final TaskReport report = newTaskReport(tid);
    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
    return new Task() {
      @Override
      public TaskId getID() {
        return tid;
      }

      @Override
      public TaskReport getReport() {
        return report;
      }

      @Override
      public Counters getCounters() {
        return new Counters(
          TypeConverter.fromYarn(report.getCounters()));
      }

      @Override
      public float getProgress() {
        return report.getProgress();
      }

      @Override
      public TaskType getType() {
        return tid.getTaskType();
      }

      @Override
      public Map<TaskAttemptId, TaskAttempt> getAttempts() {
        return attempts;
      }
      // ... (remaining overrides and class close elided)

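Finally, a TaskId carried on an event is the key used to look the Task back up from its Job: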
  class SpeculationRequestEventHandler implements EventHandler<TaskEvent> {

    @Override
    public void handle(TaskEvent event) {
      TaskId taskID = event.getTaskID();
      Task task = myJob.getTask(taskID);

      Assert.assertEquals("Wrong type event",
          TaskEventType.T_ADD_SPEC_ATTEMPT, event.getType());
      // ... (remainder of handler elided)
