Package org.apache.hadoop.tools.rumen

Examples of org.apache.hadoop.tools.rumen.TaskInfo
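
TaskInfo is Rumen's per-task record of I/O volume: input and output bytes and records, plus an optional memory figure. Below is a minimal sketch of constructing and reading one, assuming the five-argument constructor (input bytes, input records, output bytes, output records, max memory) that the snippets on this page use:

    import org.apache.hadoop.tools.rumen.TaskInfo;

    // A minimal sketch; -1 for the last argument means no memory figure recorded.
    TaskInfo info = new TaskInfo(1 << 20, 1000, 512 << 10, 500, -1);
    long inBytes  = info.getInputBytes();    // 1048576
    long inRecs   = info.getInputRecords();  // 1000
    long outBytes = info.getOutputBytes();   // 524288
    long outRecs  = info.getOutputRecords(); // 500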


    // Build a dummy reduce task plus a synthetic ReduceTaskAttemptInfo and hand
    // both to the simulator; taskId, numSlotsRequired, and reduceRuntime come
    // from the enclosing test method.
    org.apache.hadoop.mapred.TaskAttemptID taskIdOldApi =
        org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId);
    Task task = new ReduceTask("dummyjobfile", taskIdOldApi, 0, 0,
                               numSlotsRequired);
    // all byte counters are 0
    TaskInfo taskInfo = new TaskInfo(0, 0, 0, 0, 0);
    ReduceTaskAttemptInfo taskAttemptInfo =
        new ReduceTaskAttemptInfo(State.SUCCEEDED, taskInfo, 0, 0,
                                  reduceRuntime);
    TaskTrackerAction action =
        new SimulatorLaunchTaskAction(task, taskAttemptInfo);


      return;
    }
    // From Gridmix: build one GridmixSplit per map from the trace's TaskInfo,
    // distributing the reduce output specs round-robin across the maps.
    final int maps = jobdesc.getNumberMaps();
    final int reds = jobdesc.getNumberReduces();
    // Trace-wide map totals (declared here so the excerpt is self-contained).
    long mapInputBytesTotal = 0L;
    long mapOutputBytesTotal = 0L;
    long mapOutputRecordsTotal = 0L;
    for (int i = 0; i < maps; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      mapInputBytesTotal += info.getInputBytes();
      mapOutputBytesTotal += info.getOutputBytes();
      mapOutputRecordsTotal += info.getOutputRecords();
    }
    // Each reduce's share of the total map output, by bytes and by records.
    final double[] reduceRecordRatio = new double[reds];
    final double[] reduceByteRatio = new double[reds];
    for (int i = 0; i < reds; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.REDUCE, i);
      reduceByteRatio[i] = info.getInputBytes() / (1.0 * mapOutputBytesTotal);
      reduceRecordRatio[i] =
        info.getInputRecords() / (1.0 * mapOutputRecordsTotal);
    }
    final InputStriper striper = new InputStriper(inputDir, mapInputBytesTotal);
    final List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < maps; ++i) {
      // Number of reduce specs this map carries; reduces are interleaved
      // across maps (map i describes reduces i, i + maps, i + 2 * maps, ...).
      final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
      final long[] specBytes = new long[nSpec];
      final long[] specRecords = new long[nSpec];
      for (int j = 0; j < nSpec; ++j) {
        final TaskInfo info =
          jobdesc.getTaskInfo(TaskType.REDUCE, i + j * maps);
        specBytes[j] = info.getOutputBytes();
        specRecords[j] = info.getOutputRecords();
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("SPEC(%d) %d -> %d %d %d", id(), i,
              i + j * maps, info.getOutputRecords(), info.getOutputBytes()));
        }
      }
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      splits.add(new GridmixSplit(striper.splitFor(inputDir,
              info.getInputBytes(), 3), maps, i,
            info.getInputBytes(), info.getInputRecords(),
            info.getOutputBytes(), info.getOutputRecords(),
            reduceByteRatio, reduceRecordRatio, specBytes, specRecords));
    }
    pushDescription(id(), splits);
  }
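
The nSpec arithmetic above interleaves the reduce output specs across maps round-robin: map i carries the spec for every reduce r with r % maps == i. A standalone sketch of just that arithmetic (the maps/reds values are illustrative, not from the source):

    public class SpecAssignmentDemo {
      public static void main(String[] args) {
        final int maps = 3, reds = 7;
        for (int i = 0; i < maps; ++i) {
          // maps with index < (reds % maps) carry one extra spec
          final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
          for (int j = 0; j < nSpec; ++j) {
            System.out.println("map " + i + " -> reduce " + (i + j * maps));
          }
        }
      }
    }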

    // Sum the expected map-side counters from the trace job (a ZombieJob).
    long expMapInputBytes = 0;
    long expMapOutputBytes = 0;
    long expMapInputRecs = 0;
    long expMapOutputRecs = 0;
    Map<String,Long> mapCounters = new HashMap<String,Long>();
    for (int index = 0; index < zombieJob.getNumberMaps(); index++) {
      TaskInfo mapTask = zombieJob.getTaskInfo(TaskType.MAP, index);
      expMapInputBytes += mapTask.getInputBytes();
      expMapOutputBytes += mapTask.getOutputBytes();
      expMapInputRecs += mapTask.getInputRecords();
      expMapOutputRecs += mapTask.getOutputRecords();
    }
    mapCounters.put("MAP_INPUT_BYTES", expMapInputBytes);
    mapCounters.put("MAP_OUTPUT_BYTES", expMapOutputBytes);
    mapCounters.put("MAP_INPUT_RECS", expMapInputRecs);
    mapCounters.put("MAP_OUTPUT_RECS", expMapOutputRecs);

    // The reduce-side counterpart: sum the expected reduce counters.
    long expReduceInputBytes = 0;
    long expReduceOutputBytes = 0;
    long expReduceInputRecs = 0;
    long expReduceOutputRecs = 0;
    Map<String,Long> reduceCounters = new HashMap<String,Long>();
    for (int index = 0; index < zombieJob.getNumberReduces(); index++) {
      TaskInfo reduceTask = zombieJob.getTaskInfo(TaskType.REDUCE, index);
      expReduceInputBytes += reduceTask.getInputBytes();
      expReduceOutputBytes += reduceTask.getOutputBytes();
      expReduceInputRecs += reduceTask.getInputRecords();
      expReduceOutputRecs += reduceTask.getOutputRecords();
    }
    reduceCounters.put("REDUCE_INPUT_BYTES", expReduceInputBytes);
    reduceCounters.put("REDUCE_OUTPUT_BYTES", expReduceOutputBytes);
    reduceCounters.put("REDUCE_INPUT_RECS", expReduceInputRecs);
    reduceCounters.put("REDUCE_OUTPUT_RECS", expReduceOutputRecs);

    // (excerpt begins mid-method: finishing the check for whether the
    // memory emulation plugin is configured)
              contains(GridMixConfig.GRIDMIX_MEMORY_EMULATION_PLUGIN);
    }

    long origJobMapsTHU = 0;    // total heap usage (THU) across trace maps
    long origJobReducesTHU = 0; // total heap usage across trace reduces
    if (isMemEmulOn) {
      for (int index = 0; index < zombieJob.getNumberMaps(); index++) {
        TaskInfo mapTask = zombieJob.getTaskInfo(TaskType.MAP, index);
        if (mapTask.getResourceUsageMetrics().getHeapUsage() > 0) {
          origJobMapsTHU +=
                  mapTask.getResourceUsageMetrics().getHeapUsage();
        }
      }
      LOG.info("Original Job Maps Total Heap Usage: " + origJobMapsTHU);

      for (int index = 0; index < zombieJob.getNumberReduces(); index++) {
        TaskInfo reduceTask = zombieJob.getTaskInfo(TaskType.REDUCE, index);
        if (reduceTask.getResourceUsageMetrics().getHeapUsage() > 0) {
          origJobReducesTHU +=
                  reduceTask.getResourceUsageMetrics().getHeapUsage();
        }
      }
      LOG.info("Original Job Reduces Total Heap Usage: " + origJobReducesTHU);

      simuJobMapsTHU =

    // Sum the trace's cumulative CPU usage over map and reduce tasks,
    // keyed by task type.
    long mapTotalCPUUsage = 0;
    long reduceTotalCPUUsage = 0;
    Map<String,Long> resourceMetrics = new HashMap<String,Long>();

    for (int index = 0; index < zombieJob.getNumberMaps(); index++) {
      TaskInfo mapTask = zombieJob.getTaskInfo(TaskType.MAP, index);
      if (mapTask.getResourceUsageMetrics().getCumulativeCpuUsage() > 0) {
        mapTotalCPUUsage +=
            mapTask.getResourceUsageMetrics().getCumulativeCpuUsage();
      }
    }
    resourceMetrics.put("MAP", mapTotalCPUUsage);

    for (int index = 0; index < zombieJob.getNumberReduces(); index++) {
      TaskInfo reduceTask = zombieJob.getTaskInfo(TaskType.REDUCE, index);
      if (reduceTask.getResourceUsageMetrics().getCumulativeCpuUsage() > 0) {
        reduceTotalCPUUsage +=
            reduceTask.getResourceUsageMetrics().getCumulativeCpuUsage();
      }
    }
    resourceMetrics.put("REDUCE", reduceTotalCPUUsage);
    return resourceMetrics;
  }
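
A hedged usage sketch of the returned map; the method name getJobCPUMetrics is assumed for the enclosing method, not taken from the source:

    // Hypothetical caller: log the trace's per-type CPU totals.
    Map<String, Long> cpu = getJobCPUMetrics(zombieJob); // name assumed
    LOG.info("trace map CPU: " + cpu.get("MAP")
        + ", reduce CPU: " + cpu.get("REDUCE"));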

   
    // A synthetic JobStory for tests: TaskInfo values come straight from
    // pre-seeded arrays; -1 leaves the per-task memory figure unset.
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      switch (taskType) {
        case MAP:
          return new TaskInfo(m_bytesIn[taskNumber], m_recsIn[taskNumber],
              m_bytesOut[taskNumber], m_recsOut[taskNumber], -1);
        case REDUCE:
          return new TaskInfo(r_bytesIn[taskNumber], r_recsIn[taskNumber],
              r_bytesOut[taskNumber], r_recsOut[taskNumber], -1);
        default:
          throw new IllegalArgumentException("Not interested");
      }
    }

    // The matching attempt-level view: every synthetic attempt succeeds with
    // fixed runtimes (100 for a map; 100 each for the shuffle, merge, and
    // reduce phases of a reduce). The truncated signature is restored here.
    @Override
    public TaskAttemptInfo getTaskAttemptInfo(
      TaskType taskType, int taskNumber, int taskAttemptNumber) {
      switch (taskType) {
        case MAP:
          return new MapTaskAttemptInfo(
            State.SUCCEEDED,
            new TaskInfo(
              m_bytesIn[taskNumber], m_recsIn[taskNumber],
              m_bytesOut[taskNumber], m_recsOut[taskNumber], -1),
            100);

        case REDUCE:
          return new ReduceTaskAttemptInfo(
            State.SUCCEEDED,
            new TaskInfo(
              r_bytesIn[taskNumber], r_recsIn[taskNumber],
              r_bytesOut[taskNumber], r_recsOut[taskNumber], -1),
            100, 100, 100);
      }
      throw new UnsupportedOperationException();
    }
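
Reading back one of the synthetic attempts; story stands in for an instance of the JobStory implementation above and is not a name from the source:

    // Hedged usage sketch: the attempt wraps the TaskInfo seeded from the
    // r_* arrays for reduce 0.
    TaskAttemptInfo attempt =
        story.getTaskAttemptInfo(TaskType.REDUCE, 0, 0);
    TaskInfo info = attempt.getTaskInfo();
    long bytes = info.getInputBytes(); // r_bytesIn[0]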
