Package org.apache.hadoop.mapreduce.jobhistory

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser
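The excerpts below are taken from Hadoop's own tests. For orientation, here is a minimal sketch of the same API; the class name JobHistorySummary and the command-line argument are illustrative only, but the JobHistoryParser(FileSystem, Path) constructor, parse(), getParseException() and the JobInfo getters are the ones exercised throughout the excerpts.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

    public class JobHistorySummary {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path historyFilePath = new Path(args[0]);      // path to a .jhist file
        FileSystem fs = historyFilePath.getFileSystem(conf);

        JobHistoryParser parser = new JobHistoryParser(fs, historyFilePath);
        JobInfo jobInfo = parser.parse();

        // parse() records a mid-stream failure instead of throwing it,
        // so a partial parse is only visible through getParseException().
        if (parser.getParseException() != null) {
          System.err.println("Partial parse: " + parser.getParseException());
        }

        System.out.println("Job name: " + jobInfo.getJobname());
        System.out.println("User:     " + jobInfo.getUsername());
        System.out.println("Queue:    " + jobInfo.getJobQueueName());
        System.out.println("Tasks:    " + jobInfo.getAllTasks().size());
      }
    }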


    Path historyFilePath = new Path(historyDir.toString(),
      historyFileName);

    System.out.println("History File is " + historyFilePath.toString());

    JobHistoryParser parser =
      new JobHistoryParser(fs, historyFilePath);

    JobHistoryParser.JobInfo jobInfo = parser.parse();

    assertTrue(jobInfo.getUsername().equals(username));
    assertTrue(jobInfo.getJobname().equals(weirdJob));
    assertTrue(jobInfo.getJobQueueName().equals(weirdJobQueueName));
    assertTrue(jobInfo.getJobConfPath().equals(weirdPath));


        validateViewJob(jobTasksJSP + "&taskType=" + taskType + "&status="
            + state, "GET");
      }
    }

    JobHistoryParser parser =
        new JobHistoryParser(new Path(historyFileName).getFileSystem(conf),
            historyFileName);
    JobInfo jobInfo = parser.parse();
    Map<TaskID, TaskInfo> tipsMap = jobInfo.getAllTasks();
    for (TaskID tip : tipsMap.keySet()) {
      // validate access of taskdetailshistory.jsp
      validateViewJob(jtURL + "/taskdetailshistory.jsp?logFile="
          + historyFileName + "&tipid=" + tip.toString(), "GET");
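The excerpt above only iterates the task IDs to build JSP URLs. As a separate illustration, continuing with the jobInfo from that excerpt, the task map returned by getAllTasks() can be walked down to the individual attempts; this is a sketch, assuming the stock nested JobHistoryParser.TaskInfo and TaskAttemptInfo accessors (getAllTaskAttempts(), getTaskType(), getTaskStatus(), getAttemptId(), getHostname()):

    for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
      TaskInfo taskInfo = entry.getValue();
      System.out.println(entry.getKey() + " [" + taskInfo.getTaskType() + "] "
          + taskInfo.getTaskStatus());
      // Each task carries the history of all of its attempts.
      for (TaskAttemptInfo attempt : taskInfo.getAllTaskAttempts().values()) {
        System.out.println("  " + attempt.getAttemptId() + " on "
            + attempt.getHostname() + ": " + attempt.getTaskStatus());
      }
    }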

      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      final EventReader realReader = new EventReader(in);
      EventReader reader = Mockito.mock(EventReader.class);
      if (numMaps == numSuccessfulMaps) {
        reader = realReader;
      } else {
        final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack!
        Mockito.when(reader.getNextEvent()).thenAnswer(
            new Answer<HistoryEvent>() {
              public HistoryEvent answer(InvocationOnMock invocation)
              throws IOException {
                HistoryEvent event = realReader.getNextEvent();
                if (event instanceof TaskFinishedEvent) {
                  numFinishedEvents.incrementAndGet();
                }

                if (numFinishedEvents.get() <= numSuccessfulMaps) {
                  return event;
                } else {
                  throw new IOException("test");
                }
              }
            }
        );
      }

      jobInfo = parser.parse(reader);

      numFinishedMaps =
        computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);

      if (numFinishedMaps != numMaps) {
        Exception parseException = parser.getParseException();
        Assert.assertNotNull("Didn't get expected parse exception",
            parseException);
      }
    }
   
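The test above swaps in a mocked EventReader to inject a failure partway through the stream. For comparison, a plain loop over the real events, without going through the parser, is a short sketch along these lines; it relies on the EventReader(FileSystem, Path) constructor and on getNextEvent() returning null at end of file, the same convention JobHistoryParser.parse(EventReader) uses internally:

    EventReader events = new EventReader(fs, historyFilePath);
    try {
      HistoryEvent event;
      while ((event = events.getNextEvent()) != null) {
        // Print the event type and its raw Avro datum.
        System.out.println(event.getEventType() + ": " + event.getDatum());
      }
    } finally {
      events.close();
    }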

    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException,
        parseException);
    int noOfFailedAttempts = 0;
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (Task task : job.getTasks().values()) {

    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException,
        parseException);
    for (Map.Entry<TaskID,TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
      CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());
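Beyond the per-task map, the parsed JobInfo also carries aggregated counters. A sketch of dumping them, assuming getTotalCounters() (which may be null for a job that never completed) and the standard org.apache.hadoop.mapreduce.Counters/CounterGroup/Counter iteration:

    Counters totalCounters = jobInfo.getTotalCounters();
    if (totalCounters != null) {
      for (CounterGroup group : totalCounters) {
        for (Counter counter : group) {
          System.out.println(group.getName() + "." + counter.getName()
              + " = " + counter.getValue());
        }
      }
    }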

  }

  @Test
  public void testMultipleFailedTasks() throws Exception {
    JobHistoryParser parser =
        new JobHistoryParser(Mockito.mock(FSDataInputStream.class));
    EventReader reader = Mockito.mock(EventReader.class);
    final AtomicInteger numEventsRead = new AtomicInteger(0); // Hack!
    final org.apache.hadoop.mapreduce.TaskType taskType =
        org.apache.hadoop.mapreduce.TaskType.MAP;
    final TaskID[] tids = new TaskID[2];
    JobID jid = new JobID("1", 1);
    tids[0] = new TaskID(jid, taskType, 0);
    tids[1] = new TaskID(jid, taskType, 1);
    Mockito.when(reader.getNextEvent()).thenAnswer(
        new Answer<HistoryEvent>() {
          public HistoryEvent answer(InvocationOnMock invocation)
              throws IOException {
            // send two task start and two task fail events for tasks 0 and 1
            int eventId = numEventsRead.getAndIncrement();
            TaskID tid = tids[eventId & 0x1];
            if (eventId < 2) {
              return new TaskStartedEvent(tid, 0, taskType, "");
            }
            if (eventId < 4) {
              TaskFailedEvent tfe = new TaskFailedEvent(tid, 0, taskType,
                  "failed", "FAILED", null);
              tfe.setDatum(tfe.getDatum());
              return tfe;
            }
            return null;
          }
        });
    JobInfo info = parser.parse(reader);
    assertTrue("Task 0 not implicated",
        info.getErrorInfo().contains(tids[0].toString()));
  }


      JobHistory jobHistory = new JobHistory();
      jobHistory.init(conf);
      HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
     
      JobHistoryParser parser;
      JobInfo jobInfo;
      synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        FileContext fc = null;
        try {
          fc = FileContext.getFileContext(conf);
          in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
          LOG.info("Can not open history file: " + historyFilePath, ioe);
          throw (new Exception("Can not open History File"));
        }

        parser = new JobHistoryParser(in);
        jobInfo = parser.parse();
      }
      Exception parseException = parser.getParseException();
      Assert.assertNull("Caught an unexpected exception " + parseException,
          parseException);
      int noOfFailedAttempts = 0;
      Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
      for (Task task : job.getTasks().values()) {

      JobHistory jobHistory = new JobHistory();
      jobHistory.init(conf);

      HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
     
      JobHistoryParser parser;
      JobInfo jobInfo;
      synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        FileContext fc = null;
        try {
          fc = FileContext.getFileContext(conf);
          in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
          LOG.info("Can not open history file: " + historyFilePath, ioe);
          throw (new Exception("Can not open History File"));
        }

        parser = new JobHistoryParser(in);
        jobInfo = parser.parse();
      }
      Exception parseException = parser.getParseException();
      Assert.assertNull("Caught an unexpected exception " + parseException,
          parseException);
      for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
        TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
        CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());

      JobHistory jobHistory = new JobHistory();
      jobHistory.init(conf);

      HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);

      JobHistoryParser parser;
      JobInfo jobInfo;
      synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        FileContext fc = null;
        try {
          fc = FileContext.getFileContext(conf);
          in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
          LOG.info("Can not open history file: " + historyFilePath, ioe);
          throw (new Exception("Can not open History File"));
        }

        parser = new JobHistoryParser(in);
        jobInfo = parser.parse();
      }
      Exception parseException = parser.getParseException();
      assertNull("Caught an unexpected exception " + parseException,
          parseException);
      final List<String> originalDiagnostics = job.getDiagnostics();
      final String historyError = jobInfo.getErrorInfo();
      assertTrue("No original diagnostics for a failed job",
