Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
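Note on API versions: in Hadoop 0.20/1.x, org.apache.hadoop.mapreduce.TaskAttemptContext is a concrete class that is constructed directly, as the first few examples below do. From Hadoop 0.23/2.x on it is an interface, and the framework's concrete class is org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl, which the later examples use. A minimal construction sketch for both generations (ids chosen arbitrarily, matching the test snippets below):

    // Hadoop 1.x: TaskAttemptContext is a concrete class.
    Configuration conf = new Configuration();
    TaskAttemptID id = new TaskAttemptID("jt", 0, true, 0, 0); // isMap = true
    TaskAttemptContext context = new TaskAttemptContext(conf, id);

    // Hadoop 2.x: TaskAttemptContext is an interface; instantiate the Impl.
    TaskAttemptID id2 = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
    TaskAttemptContext context2 =
        new org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl(conf, id2);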


  /** {@inheritDoc} */
  public RecordWriter<K, V> getRecordWriter(FileSystem filesystem,
      JobConf job, String name, Progressable progress) throws IOException {
    org.apache.hadoop.mapreduce.RecordWriter<K, V> w = super.getRecordWriter(
      new TaskAttemptContext(job,
            TaskAttemptID.forName(job.get("mapred.task.id"))));
    org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter writer =
     (org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter) w;
    try {
      return new DBRecordWriter(writer.getConnection(), writer.getStatement());
    } catch (SQLException se) {
      throw new IOException(se);
    }
  }
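The wrapper above recovers the attempt id from the job configuration entry "mapred.task.id". For reference, TaskAttemptID.forName() parses the standard string form of an attempt id; a small illustration using the canonical example string from the TaskAttemptID javadoc:

    // Format: attempt_<jtIdentifier>_<jobNum>_(m|r)_<taskNum>_<attemptNum>
    TaskAttemptID id = TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
    assert id.getTaskID().getId() == 5; // task number
    assert id.getId() == 0;             // attempt number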


    // Test that we properly initialize the child RecordReader when
    // CombineFileInputFormat and CombineFileRecordReader are used.

    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf1 = new Configuration();
    conf1.set(DUMMY_KEY, "STATE1");
    TaskAttemptContext context1 = new TaskAttemptContext(conf1, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    InputFormat inputFormat = new ChildRRInputFormat();

    Path [] files = { new Path("file1") };
    long [] lengths = { 1 };

    CombineFileSplit split = new CombineFileSplit(files, lengths);

    RecordReader rr = inputFormat.createRecordReader(split, context1);
    assertTrue("Unexpected RR type!", rr instanceof CombineFileRecordReader);

    // Verify that the initial configuration is the one being used.
    // Right after construction the dummy key should have value "STATE1"
    assertEquals("Invalid initial dummy key value", "STATE1",
      rr.getCurrentKey().toString());

    // Switch the active context for the RecordReader...
    Configuration conf2 = new Configuration();
    conf2.set(DUMMY_KEY, "STATE2");
    TaskAttemptContext context2 = new TaskAttemptContext(conf2, taskId);
    rr.initialize(split, context2);

    // And verify that the new context is updated into the child record reader.
    assertEquals("Invalid secondary dummy key value", "STATE2",
      rr.getCurrentKey().toString());
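ChildRRInputFormat and DummyRecordReader are helpers defined inside this test class; the snippet above relies on them but does not show them. A minimal sketch of what such a DummyRecordReader can look like (names and details assumed here, not copied from the test source; imports from org.apache.hadoop.io and org.apache.hadoop.mapreduce assumed): its current key mirrors the DUMMY_KEY value of whichever context it last saw, which is exactly what the assertions above probe.

    // Hypothetical test helper: surfaces DUMMY_KEY from the active context.
    public static class DummyRecordReader extends RecordReader<Text, Text> {
      private final Text state = new Text();

      // CombineFileRecordReader instantiates child readers through this
      // (CombineFileSplit, TaskAttemptContext, Integer) constructor.
      public DummyRecordReader(CombineFileSplit split,
          TaskAttemptContext context, Integer index) {
        state.set(context.getConfiguration().get(DUMMY_KEY, ""));
      }

      @Override
      public void initialize(InputSplit split, TaskAttemptContext context) {
        // A re-initialize (rr.initialize(split, context2) above) swaps state.
        state.set(context.getConfiguration().get(DUMMY_KEY, ""));
      }

      @Override
      public boolean nextKeyValue() { return false; }

      @Override
      public Text getCurrentKey() { return state; }

      @Override
      public Text getCurrentValue() { return state; }

      @Override
      public float getProgress() { return 0.0f; }

      @Override
      public void close() { }
    }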

    // Test that a split containing multiple files works correctly,
    // with the child RecordReader getting its initialize() method
    // called a second time.
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf = new Configuration();
    TaskAttemptContext context = new TaskAttemptContext(conf, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    InputFormat inputFormat = new ChildRRInputFormat();
    // ... (the test goes on to build a CombineFileSplit over multiple files
    // and verify that the child reader is re-initialized for each one)

            throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$
        }
        if (id == null) {
            throw new IllegalArgumentException("id must not be null"); //$NON-NLS-1$
        }
        return new TaskAttemptContext(conf, id);
    }

            throw new IllegalArgumentException("id must not be null"); //$NON-NLS-1$
        }
        if (progressable == null) {
            return newTaskAttemptContext(conf, id);
        }
        return new TaskAttemptContext(conf, id) {
            @Override
            public void progress() {
                progressable.progress();
                super.progress();
            }
        };
    }
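Why the Progressable-forwarding variant exists: output formats may call context.progress() during a slow flush or close, and those calls must reach the surrounding task's reporter or the task can be timed out. A hypothetical caller (the 'reporter' variable is assumed; org.apache.hadoop.mapred.Reporter implements Progressable):

        TaskAttemptContext ctx = newTaskAttemptContext(conf, id, reporter);
        writer.close(ctx); // a long close keeps heartbeating via ctx.progress()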

        assert counters != null;
        Job job = JobCompatibility.newJob(context.getConfiguration());
        job.setOutputFormatClass(formatClass);
        job.setOutputKeyClass(keyClass);
        job.setOutputValueClass(valueClass);
        TaskAttemptContext localContext = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                context.getTaskAttemptID());
        if (FileOutputFormat.class.isAssignableFrom(formatClass)) {
            setOutputFilePrefix(localContext, name);
        }
        OutputFormat<?, ?> format = ReflectionUtils.newInstance(
                formatClass,
                localContext.getConfiguration());
        RecordWriter<?, ?> writer = format.getRecordWriter(localContext);
        return new ResultOutput<Writable>(localContext, writer);
    }
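Once the RecordWriter is obtained, the usual new-API lifecycle applies: write key/value pairs, then close with the same context. A compressed sketch (method name, key choice, and value stream are hypothetical):

        void drain(RecordWriter<Writable, Writable> writer,
                TaskAttemptContext context,
                Iterable<? extends Writable> values)
                throws IOException, InterruptedException {
            for (Writable value : values) {
                writer.write(NullWritable.get(), value); // hypothetical key
            }
            writer.close(context); // close() takes the TaskAttemptContext
        }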

        checkType(definition, description);
        Configuration conf = configurations.newInstance();
        Job job = JobCompatibility.newJob(conf);
        String resolved = variables.parse(description.getPathPrefix(), false);
        FileInputFormat.setInputPaths(job, new Path(resolved));
        TaskAttemptContext taskContext = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileInputFormat<?, V> format = getOpposite(conf, description.getOutputFormat());
        FileInputFormatDriver<V> result = new FileInputFormatDriver<V>(definition, taskContext, format);
        return result;
    }

            throw new IOException("Failed to create temporary directory");
        }
        LOG.debug("Using staging deploy target: {}", temporaryDir);
        URI uri = temporaryDir.toURI();
        FileOutputFormat.setOutputPath(job, new Path(uri));
        TaskAttemptContext context = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileOutputFormatDriver<V> result = new FileOutputFormatDriver<V>(context, output, NullWritable.get()) {
            @Override
            public void close() throws IOException {
                // ...
            }
        };

  private static class TaskCleanupTransition implements
      SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
    @SuppressWarnings("unchecked")
    @Override
    public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
      TaskAttemptContext taskContext =
        new TaskAttemptContextImpl(taskAttempt.conf,
            TypeConverter.fromYarn(taskAttempt.attemptId));
      taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
          taskAttempt.attemptId, taskContext));
    }
  }

    boolean needToClean = false;
    String recoveredState = taInfo.getTaskStatus();
    if (recoverOutput
        && TaskAttemptState.SUCCEEDED.toString().equals(recoveredState)) {
      TaskAttemptContext tac = new TaskAttemptContextImpl(conf,
          TypeConverter.fromYarn(attemptId));
      try {
        committer.recoverTask(tac);
        LOG.info("Recovered output from task attempt " + attemptId);
      } catch (Exception e) {
        LOG.error("Unable to recover task attempt " + attemptId, e);
        LOG.info("Task attempt " + attemptId + " will be recovered as KILLED");
        recoveredState = TaskAttemptState.KILLED.toString();
        needToClean = true;
      }
    }

    TaskAttemptStateInternal attemptState;
    if (TaskAttemptState.SUCCEEDED.toString().equals(recoveredState)) {
      attemptState = TaskAttemptStateInternal.SUCCEEDED;
      reportedStatus.taskState = TaskAttemptState.SUCCEEDED;
      eventHandler.handle(createJobCounterUpdateEventTASucceeded(this));
      logAttemptFinishedEvent(attemptState);
    } else if (TaskAttemptState.FAILED.toString().equals(recoveredState)) {
      attemptState = TaskAttemptStateInternal.FAILED;
      reportedStatus.taskState = TaskAttemptState.FAILED;
      eventHandler.handle(createJobCounterUpdateEventTAFailed(this, false));
      TaskAttemptUnsuccessfulCompletionEvent tauce =
          createTaskAttemptUnsuccessfulCompletionEvent(this,
              TaskAttemptStateInternal.FAILED);
      eventHandler.handle(
          new JobHistoryEvent(attemptId.getTaskId().getJobId(), tauce));
    } else {
      if (!TaskAttemptState.KILLED.toString().equals(recoveredState)) {
        if (String.valueOf(recoveredState).isEmpty()) {
          LOG.info("TaskAttempt" + attemptId
              + " had not completed, recovering as KILLED");
        } else {
          LOG.warn("TaskAttempt " + attemptId + " found in unexpected state "
              + recoveredState + ", recovering as KILLED");
        }
        addDiagnosticInfo("Killed during application recovery");
        needToClean = true;
      }
      attemptState = TaskAttemptStateInternal.KILLED;
      reportedStatus.taskState = TaskAttemptState.KILLED;
      eventHandler.handle(createJobCounterUpdateEventTAKilled(this, false));
      TaskAttemptUnsuccessfulCompletionEvent tauce =
          createTaskAttemptUnsuccessfulCompletionEvent(this,
              TaskAttemptStateInternal.KILLED);
      eventHandler.handle(
          new JobHistoryEvent(attemptId.getTaskId().getJobId(), tauce));
    }

    if (needToClean) {
      TaskAttemptContext tac = new TaskAttemptContextImpl(conf,
          TypeConverter.fromYarn(attemptId));
      try {
        committer.abortTask(tac);
      } catch (Exception e) {
        LOG.warn("Task cleanup failed for attempt " + attemptId, e);
      }
    }
    return attemptState;
  }
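The two committer calls in this recovery path have distinct contracts: recoverTask() re-commits the output of an attempt that had already SUCCEEDED before the restart, while abortTask() discards partial output for attempts recovered as FAILED or KILLED. A minimal committer honoring that contract (hypothetical, for illustration; the empty bodies are stubs):

    public class SketchCommitter extends OutputCommitter {
        @Override public void setupJob(JobContext jobContext) { }
        @Override public void setupTask(TaskAttemptContext taskContext) { }
        @Override public boolean needsTaskCommit(TaskAttemptContext taskContext) {
            return false;
        }
        @Override public void commitTask(TaskAttemptContext taskContext) { }

        @Override
        public void abortTask(TaskAttemptContext taskContext) throws IOException {
            // Invoked via committer.abortTask(tac) above: delete partial output.
        }

        @Override
        public void recoverTask(TaskAttemptContext taskContext) throws IOException {
            // Invoked via committer.recoverTask(tac) above after an AM restart;
            // OutputCommitter's default implementation is a no-op.
        }
    }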
