Package org.apache.hadoop.mapreduce.task

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl
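
A quick orientation before the snippets: org.apache.hadoop.mapreduce.task.JobContextImpl is the concrete implementation of the read-only JobContext view that Hadoop hands to InputFormats, OutputFormats and OutputCommitters. The examples below show how different projects construct it. As a minimal, self-contained sketch (the configuration value and job id are placeholders for illustration, not taken from any of the examples):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class JobContextImplSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Placeholder setting so the context has something to report back.
    conf.set("mapreduce.job.name", "jobcontext-sketch");
    // JobContextImpl pairs a Configuration with a JobID and exposes the
    // read-only JobContext API (job name, number of reducers, formats, ...).
    JobContext context = new JobContextImpl(conf, new JobID("local", 1));
    System.out.println(context.getJobID() + " -> " + context.getJobName());
  }
}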


    }

    @Override
    public JobContext createJobContext(Configuration conf,
                                       JobID jobId) {
        return new JobContextImpl(conf instanceof JobConf ? new JobConf(conf) : conf,
                                  jobId);
    }
View Full Code Here


    };
  }

  @Override
  public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
    return new JobContextImpl(job.getConfiguration(), job.getJobID());
  }
View Full Code Here

        }

        checkTaskLimits();

        // Pick the context class that matches the committer API in use: new-API
        // jobs get mapreduce.task.JobContextImpl, old-API jobs get the
        // org.apache.hadoop.mapred variant.
        if (job.newApiCommitter) {
          job.jobContext = new JobContextImpl(job.conf,
              job.oldJobId);
        } else {
          job.jobContext = new org.apache.hadoop.mapred.JobContextImpl(
              job.conf, job.oldJobId);
        }
View Full Code Here

// Munge preprocessor block: the commented-out branch targets Hadoop versions where
// JobContext is still a concrete class; on newer versions, where JobContext is an
// interface, the JobContextImpl class is constructed instead.
/*if[HADOOP_NON_JOBCONTEXT_IS_INTERFACE]
      List<InputSplit> splitArray =
          inputFormat.getSplits(
              new JobContext(new Configuration(), new JobID()), 1);
else[HADOOP_NON_JOBCONTEXT_IS_INTERFACE]*/
      List<InputSplit> splitArray =
          inputFormat.getSplits(
              new JobContextImpl(new Configuration(), new JobID()), 1);
/*end[HADOOP_NON_JOBCONTEXT_IS_INTERFACE]*/
    ByteArrayOutputStream byteArrayOutputStream =
        new ByteArrayOutputStream();
    DataOutputStream outputStream = new DataOutputStream(byteArrayOutputStream);
    ((Writable) splitArray.get(0)).write(outputStream);
View Full Code Here

    }

    static public JobContext createJobContext(Configuration conf,
            JobID jobId) {
        // Re-wrap an old-API JobConf so its settings carry over into the context;
        // plain Configurations are passed through unchanged.
        if (conf instanceof JobConf) {
            return new JobContextImpl(new JobConf(conf), jobId);
        } else {
            return new JobContextImpl(conf, jobId);
        }
    }
View Full Code Here


   * {@inheritDoc}
   *
   * Use the Hadoop 2.x way of creating a {@link JobContext} object.
   */
  public JobContext createJobContext(Configuration configuration) {
    return new JobContextImpl(configuration, null);
  }
View Full Code Here
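
For contrast with the null-JobID helper above, here is a hedged sketch of the createJobContext(Configuration, JobID) pattern shown earlier on this page, which re-wraps an old-API JobConf before building the context. The class name, reduce count and job id below are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class CreateJobContextSketch {
  // Mirrors the static createJobContext helpers shown above.
  static JobContext createJobContext(Configuration conf, JobID jobId) {
    return conf instanceof JobConf
        ? new JobContextImpl(new JobConf(conf), jobId)  // copy keeps old-API settings
        : new JobContextImpl(conf, jobId);
  }

  public static void main(String[] args) {
    JobConf oldApiConf = new JobConf();
    oldApiConf.setNumReduceTasks(3);  // set through the old mapred API
    JobContext context = createJobContext(oldApiConf, new JobID("local", 2));
    System.out.println(context.getNumReduceTasks());  // prints 3; the setting survived the copy
  }
}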


  }

  @Test
  public void testNoCommitAction() {
    TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
    JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
        taskAttemptContext.getTaskAttemptID().getJobID());
    try {
      OutputCommitter committer = new CopyCommitter(null, taskAttemptContext);
      committer.commitJob(jobContext);
      Assert.assertEquals(taskAttemptContext.getStatus(), "Commit Successful");
View Full Code Here
