Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.Counters$FrameworkGroupImpl
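The snippets below are source excerpts that exercise the org.apache.hadoop.mapreduce.Counters API. FrameworkGroupImpl is the internal group implementation that Counters uses for framework counters such as those in the TaskCounter enum; client code normally reaches it only through the Counters API itself. For orientation, here is a minimal, self-contained sketch of that API: creating a Counters object, incrementing a counter looked up by group and name, and iterating over the groups. The group and counter names in it are illustrative only and do not come from the excerpts.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

public class CountersBasics {
  public static void main(String[] args) {
    Counters counters = new Counters();

    // Look up (creating on demand) a counter by group and name, then bump it.
    Counter written = counters.findCounter("MyApp", "RECORDS_WRITTEN");
    written.increment(42);

    // Counters is Iterable over its groups, and each group over its counters.
    for (CounterGroup group : counters) {
      for (Counter counter : group) {
        System.out.println(group.getName() + "." + counter.getName()
            + " = " + counter.getValue());
      }
    }
  }
}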


  private AppContext mockAppContext(ApplicationId appId, boolean isLastAMRetry) {
    JobId jobId = TypeConverter.toYarn(TypeConverter.fromYarn(appId));
    AppContext mockContext = mock(AppContext.class);
    Job mockJob = mock(Job.class);
    when(mockJob.getAllCounters()).thenReturn(new Counters());
    when(mockJob.getTotalMaps()).thenReturn(10);
    when(mockJob.getTotalReduces()).thenReturn(10);
    when(mockJob.getName()).thenReturn("mockjob");
    when(mockContext.getJob(jobId)).thenReturn(mockJob);
    when(mockContext.getApplicationID()).thenReturn(appId);
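The excerpt above wires an empty Counters instance into a mocked Job with Mockito. A minimal hedged sketch of that stub-and-read pattern follows; the assertion and the assumption that Job here is the AM-side org.apache.hadoop.mapreduce.v2.app.job.Job interface are mine, not part of the excerpt.

import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.v2.app.job.Job;  // assumed: AM-side Job interface

public class MockJobCountersSketch {
  @org.junit.Test
  public void stubAndReadCounters() {
    Counters empty = new Counters();
    Job mockJob = mock(Job.class);
    when(mockJob.getAllCounters()).thenReturn(empty);

    // Any caller of the mocked Job now sees the canned (empty) Counters instance.
    assertSame(empty, mockJob.getAllCounters());
  }
}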


  public void testTaskAttemptFinishedEvent() throws Exception {

    JobID jid = new JobID("001", 1);
    TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
    TaskAttemptID taskAttemptId = new TaskAttemptID(tid, 3);
    Counters counters = new Counters();
    TaskAttemptFinishedEvent test = new TaskAttemptFinishedEvent(taskAttemptId,
        TaskType.REDUCE, "TEST", 123L, "RAKNAME", "HOSTNAME", "STATUS",
        counters);
    assertEquals(test.getAttemptId().toString(), taskAttemptId.toString());
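The event above is constructed with a freshly created, empty Counters object. When a test needs non-empty counters, they can be populated first; a small sketch using the standard TaskCounter enum (the class name, helper name, counter choice, and value are illustrative):

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class EventCountersSketch {
  // Hypothetical helper: build a non-empty Counters instance for an event under test.
  static Counters countersWithReduceInput(long records) {
    Counters counters = new Counters();
    counters.findCounter(TaskCounter.REDUCE_INPUT_RECORDS).increment(records);
    return counters;
  }
}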

            // Fragment: fabricate a synthetic job-history event for the given eventId.
            if (eventId < 2) {
              return new TaskStartedEvent(tid, 0, taskType, "");
            }
            if (eventId < 4) {
              TaskFailedEvent tfe = new TaskFailedEvent(tid, 0, taskType,
                  "failed", "FAILED", null, new Counters());
              tfe.setDatum(tfe.getDatum());
              return tfe;
            }
            if (eventId < 5) {
              JobUnsuccessfulCompletionEvent juce =

  private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tai,
      TaskAttemptState tas) {

    ContainerId ci = mock(ContainerId.class);
    Counters counters = mock(Counters.class);
    TaskType tt = TaskType.MAP;

    long finishTime = System.currentTimeMillis();

    TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class);
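Here Counters itself is mocked rather than instantiated. A hedged sketch of how such a mock is usually given behaviour; the class name, the stubbed counter, and the value are illustrative and not taken from the excerpt.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class MockCountersSketch {
  // Hypothetical helper: a mocked Counters that reports one spilled record.
  static Counters mockCountersWithSpill() {
    Counter spilled = mock(Counter.class);
    when(spilled.getValue()).thenReturn(1L);

    Counters counters = mock(Counters.class);
    when(counters.findCounter(TaskCounter.SPILLED_RECORDS)).thenReturn(spilled);
    return counters;
  }
}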

      } else if (getStatus) {
        Job job = cluster.getJob(JobID.forName(jobid));
        if (job == null) {
          System.out.println("Could not find job " + jobid);
        } else {
          Counters counters = job.getCounters();
          System.out.println();
          System.out.println(job);
          if (counters != null) {
            System.out.println(counters);
          } else {
            System.out.println("Counters not available. Job is retired.");
          }
          exitCode = 0;
        }
      } else if (getCounter) {
        Job job = cluster.getJob(JobID.forName(jobid));
        if (job == null) {
          System.out.println("Could not find job " + jobid);
        } else {
          Counters counters = job.getCounters();
          if (counters == null) {
            System.out.println("Counters not available for retired job " +
            jobid);
            exitCode = -1;
          } else {
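The CLI code above has to cope with Job.getCounters() returning null once a job has been retired. A minimal client-side sketch of the same lookup; the class name and the specific counter read at the end are illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskCounter;

public class PrintJobCounters {
  public static void main(String[] args) throws Exception {
    Cluster cluster = new Cluster(new Configuration());
    Job job = cluster.getJob(JobID.forName(args[0]));   // expects a "job_..." id string
    if (job == null) {
      System.out.println("Could not find job " + args[0]);
      return;
    }
    Counters counters = job.getCounters();              // may be null for a retired job
    if (counters == null) {
      System.out.println("Counters not available. Job is retired.");
    } else {
      System.out.println("Map input records: "
          + counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
    }
  }
}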

 
  private void processJobFinishedEvent(
      PreparedStatement entityPS,
      PreparedStatement workflowUpdateNumCompletedPS,
      LoggingEvent logEvent, JobFinishedEvent historyEvent) {
    Counters counters = historyEvent.getMapCounters();
    long inputBytes = 0;
    if (counters != null) {
      for (CounterGroup group : counters) {
        for (Counter counter : group) {
          if (counter.getName().equals("HDFS_BYTES_READ"))
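The loop above is cut off before the accumulation itself; a small helper showing the complete pattern of summing every counter with a given name across all groups (the class and helper names are mine, not from the source). With it, the map-side HDFS bytes read would be sumByName(historyEvent.getMapCounters(), "HDFS_BYTES_READ").

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;

public class CounterSumSketch {
  // Hypothetical helper: sum every counter with the given name across all groups.
  static long sumByName(Counters counters, String counterName) {
    long total = 0;
    if (counters != null) {
      for (CounterGroup group : counters) {
        for (Counter counter : group) {
          if (counter.getName().equals(counterName)) {
            total += counter.getValue();
          }
        }
      }
    }
    return total;
  }
}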

          || state == JobState.KILLED || state == JobState.SUCCEEDED) {
        this.mayBeConstructFinalFullCounters();
        return fullCounters;
      }

      Counters counters = new Counters();
      counters.incrAllCounters(jobCounters);
      return incrTaskCounters(counters, tasks.values());

    } finally {
      readLock.unlock();
    }
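The getCounters() above builds a fresh Counters object, folds in the job-level counters with incrAllCounters, and then adds each task's counters via the incrTaskCounters helper. A self-contained sketch of the underlying aggregation on plain Counters objects; the group and counter names are illustrative.

import org.apache.hadoop.mapreduce.Counters;

public class CounterAggregationSketch {
  public static void main(String[] args) {
    Counters jobCounters = new Counters();
    jobCounters.findCounter("MyApp", "RECORDS").increment(5);

    Counters taskCounters = new Counters();
    taskCounters.findCounter("MyApp", "RECORDS").increment(7);

    // incrAllCounters adds every counter of the argument into the receiver.
    Counters total = new Counters();
    total.incrAllCounters(jobCounters);
    total.incrAllCounters(taskCounters);

    // Prints 12 (5 + 7).
    System.out.println(total.findCounter("MyApp", "RECORDS").getValue());
  }
}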

    }
  }

  // Aggregate the job-level counters plus every task's counters into the
  // full, map-only, and reduce-only totals.
  @Private
  public void constructFinalFullcounters() {
    this.fullCounters = new Counters();
    this.finalMapCounters = new Counters();
    this.finalReduceCounters = new Counters();
    this.fullCounters.incrAllCounters(jobCounters);
    for (Task t : this.tasks.values()) {
      Counters counters = t.getCounters();
      switch (t.getType()) {
      case MAP:
        this.finalMapCounters.incrAllCounters(counters);
        break;
      case REDUCE:

    }
  }

  // Task-level counters: under the read lock, delegate to the counters of the
  // attempt chosen by selectBestAttempt().
  @Override
  public Counters getCounters() {
    Counters counters = null;
    readLock.lock();
    try {
      TaskAttempt bestAttempt = selectBestAttempt();
      if (bestAttempt != null) {
        counters = bestAttempt.getCounters();

  @Override
  public Counters getCounters() {
    readLock.lock();
    try {
      Counters counters = reportedStatus.counters;
      if (counters == null) {
        counters = EMPTY_COUNTERS;
//        counters.groups = new HashMap<String, CounterGroup>();
      }
      return counters;
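The attempt-level getCounters() above falls back to a shared EMPTY_COUNTERS instance when no status has been reported yet. A hedged sketch of that null-safe pattern; the class, field, and method here are illustrative, not the original implementation.

import org.apache.hadoop.mapreduce.Counters;

public class EmptyCountersFallbackSketch {
  // Hypothetical illustration of the fallback: one shared, never-mutated empty instance.
  private static final Counters EMPTY_COUNTERS = new Counters();

  static Counters countersOrEmpty(Counters reported) {
    return reported != null ? reported : EMPTY_COUNTERS;
  }
}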

