Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptID
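A TaskAttemptID uniquely identifies one attempt of one task within a job. Its canonical string form is attempt_<jtIdentifier>_<jobId>_<taskType>_<taskId>_<attemptId>, e.g. attempt_200707121733_0001_m_000000_0, where m marks a map task. Before the excerpts below, a minimal sketch of building and parsing one (all identifiers are illustrative only):

import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskAttemptIdDemo {
  public static void main(String[] args) {
    // Build an ID piece by piece: cluster id "jt", job 0, map task 0, attempt 0
    TaskAttemptID built = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
    System.out.println(built);                // attempt_jt_0000_m_000000_0

    // Parse the canonical string form back into an ID
    TaskAttemptID parsed =
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
    System.out.println(parsed.getTaskID());   // task_200707121733_0001_m_000000
    System.out.println(parsed.getId());       // 0 (the attempt number)
  }
}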



  // FileOutputCommitter-style commitTask: the TaskAttemptID from the
  // context identifies which attempt's output directory gets promoted.
  @Private
  public void commitTask(TaskAttemptContext context, Path taskAttemptPath)
      throws IOException {
    TaskAttemptID attemptId = context.getTaskAttemptID();
    if (hasOutputPath()) {
      context.progress();
      if (taskAttemptPath == null) {
        taskAttemptPath = getTaskAttemptPath(context);
      }
      // ... (excerpt truncated)


  // Recovery: the previous application attempt's output is located via
  // getAppAttemptId(context) - 1; the first attempt has nothing to recover.
  @Override
  public void recoverTask(TaskAttemptContext context)
      throws IOException {
    if (hasOutputPath()) {
      context.progress();
      TaskAttemptID attemptId = context.getTaskAttemptID();
      int previousAttempt = getAppAttemptId(context) - 1;
      if (previousAttempt < 0) {
        throw new IOException("Cannot recover task output for first attempt...");
      }
      // ... (excerpt truncated)
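Both excerpts above follow the FileOutputCommitter pattern. A minimal sketch of driving such a committer directly, assuming the new (org.apache.hadoop.mapreduce) API; the output path and attempt string are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class CommitterDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    TaskAttemptID tid =
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
    TaskAttemptContext ctx = new TaskAttemptContextImpl(conf, tid);

    Path out = new Path("/tmp/demo-output");  // illustrative local path
    FileOutputCommitter committer = new FileOutputCommitter(out, ctx);
    committer.setupTask(ctx);
    // ... a real task would write under committer.getTaskAttemptPath(ctx) ...
    committer.commitTask(ctx);  // promotes this attempt's output
  }
}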

    // Old-API (pre-0.21) TaskAttemptContext was a concrete class, so the
    // test can construct it directly with a default TaskAttemptID.
    InputSplit split = iF.getSplits(jc).get(0);
    sd.setOriginalSchema(jc, schema);
    sd.setOutputSchema(jc, schema);
    sd.initialize(jc, getProps());

    TaskAttemptContext tac = new TaskAttemptContext(conf, new TaskAttemptID());
    RecordReader<?, ?> rr = iF.createRecordReader(split, tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getExpectedRecords();
    for (int j = 0; j < 2; j++) {
      Assert.assertTrue(rr.nextKeyValue());
      // ... (excerpt truncated)

    sd.setOriginalSchema(jc, buildHiveSchema());
    sd.setOutputSchema(jc, buildPrunedSchema());

    sd.initialize(jc, getProps());
    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR,
        jc.getConfiguration().get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
    TaskAttemptContext tac = new TaskAttemptContext(conf, new TaskAttemptID());
    RecordReader<?, ?> rr = iF.createRecordReader(split, tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getPrunedRecords();
    for (int j = 0; j < 2; j++) {
      Assert.assertTrue(rr.nextKeyValue());
      // ... (excerpt truncated)

    sd.initialize(jc, getProps());
    Map<String, String> map = new HashMap<String, String>(1);
    map.put("part1", "first-part");
    sd.setPartitionValues(jc, map);
    conf.set(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR,
        jc.getConfiguration().get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
    TaskAttemptContext tac = new TaskAttemptContext(conf, new TaskAttemptID());
    RecordReader<?, ?> rr = iF.createRecordReader(split, tac);
    rr.initialize(split, tac);
    HCatRecord[] tuples = getReorderedCols();
    for (int j = 0; j < 2; j++) {
      Assert.assertTrue(rr.nextKeyValue());
      // ... (excerpt truncated)

      context.getConfiguration().setLong("mapred.max.split.size", maxSplitSize);
      List<InputSplit> splits = inputFormat.getSplits(context);
      // JUnit's assertEquals takes (message, expected, actual)
      assertEquals("splits length should be " + splitNumber,
          splitNumber, splits.size());
      int readCount = 0;
      for (int i = 0; i < splits.size(); i++) {
        TaskAttemptContext tac = new TaskAttemptContext(jonconf, new TaskAttemptID());
        RecordReader<LongWritable, BytesRefArrayWritable> rr =
            inputFormat.createRecordReader(splits.get(i), tac);
        rr.initialize(splits.get(i), tac);
        while (rr.nextKeyValue()) {
          readCount++;
        }
        // ... (excerpt truncated)
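mapred.max.split.size is the pre-2.x spelling of the split-size cap; under Hadoop 2+ the same limit can be set through the FileInputFormat helper. A minimal sketch, assuming a new-API Job:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class SplitSizeDemo {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration());
    // Sets "mapreduce.input.fileinputformat.split.maxsize"
    FileInputFormat.setMaxInputSplitSize(job, 64L * 1024 * 1024); // 64 MB cap
  }
}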

  public void testRecordReaderInit() throws InterruptedException, IOException {
    // Test that we properly initialize the child RecordReader when
    // CombineFileInputFormat and CombineFileRecordReader are used.

    TaskAttemptID taskId = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
    Configuration conf1 = new Configuration();
    conf1.set(DUMMY_KEY, "STATE1");
    TaskAttemptContext context1 = new TaskAttemptContextImpl(conf1, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    // ... (excerpt truncated)

  public void testReinit() throws Exception {
    // Test that a split containing multiple files works correctly,
    // with the child RecordReader getting its initialize() method
    // called a second time.
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);
    Configuration conf = new Configuration();
    TaskAttemptContext context = new TaskAttemptContextImpl(conf, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    // ... (excerpt truncated)

    // ... (excerpt begins mid-test)
    Assert.assertEquals(4, taLogDirs.size()); // all 4 attempts found

    for (Map.Entry<TaskAttemptID, Path> dirEntry : taLogDirs.entrySet()) {
      final TaskAttemptID tid = dirEntry.getKey();
      final Path profilePath = new Path(dirEntry.getValue(),
        TaskLog.LogName.PROFILE.toString());
      final Path stdoutPath = new Path(dirEntry.getValue(),
        TaskLog.LogName.STDOUT.toString());
      if (useDefault || tid.getTaskType() == TaskType.MAP) {
        if (tid.getTaskID().getId() == PROFILED_TASK_ID) {
          // verify profile.out
          final BufferedReader br = new BufferedReader(new InputStreamReader(
            localFs.open(profilePath)));
          final String line = br.readLine();
          Assert.assertTrue("No hprof content found!",
            line != null && line.startsWith("JAVA PROFILE"));
          br.close();
          Assert.assertEquals(0L, localFs.getFileStatus(stdoutPath).getLen());
        } else {
          Assert.assertFalse("hprof file should not exist",
            localFs.exists(profilePath));
        }
      } else {
        Assert.assertFalse("hprof file should not exist",
          localFs.exists(profilePath));
        if (tid.getTaskID().getId() == PROFILED_TASK_ID) {
          // reducer is profiled with Xprof
          final BufferedReader br = new BufferedReader(new InputStreamReader(
            localFs.open(stdoutPath)));
          boolean flatProfFound = false;
          String line;
          // ... (excerpt truncated)

  private TaskAttemptContext getTestTaskAttemptContext(final Job job)
      throws Exception {
    TaskAttemptContext context;
    if (isPost020MapReduce()) {
      // Post-0.20 MapReduce: TaskAttemptContext is an interface, so the
      // concrete TaskAttemptContextImpl is loaded reflectively to let the
      // same code compile against both API generations.
      TaskAttemptID id =
          TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
      Class<?> clazz =
          Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
      Constructor<?> c = clazz
          .getConstructor(Configuration.class, TaskAttemptID.class);
      // ... (excerpt truncated)
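When the code only needs to run against post-0.20 MapReduce, the reflection above collapses to a direct call. A minimal sketch, reusing the same illustrative attempt string:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class ContextDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    TaskAttemptID id =
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
    // Direct equivalent of the reflective Constructor.newInstance(...) call
    TaskAttemptContext context = new TaskAttemptContextImpl(conf, id);
    System.out.println(context.getTaskAttemptID());
  }
}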
