Package org.apache.hadoop.tools.rumen

Examples of org.apache.hadoop.tools.rumen.TaskInfo
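
TaskInfo is Rumen's per-task summary record: input and output bytes and
record counts, an optional max-memory figure, and (in later versions)
resource-usage metrics. Tools such as Gridmix read these values to
synthesize jobs and to verify that replayed tasks match their spec. A
minimal construction sketch, assuming the five-argument constructor
(bytesIn, recsIn, bytesOut, recsOut, maxMemory) used in the excerpts
below; all numbers are invented:

    import org.apache.hadoop.tools.rumen.TaskInfo;

    public class TaskInfoSketch {
      public static void main(String[] args) {
        // Invented values; the trailing -1 leaves max memory unspecified,
        // matching the excerpts below.
        TaskInfo info = new TaskInfo(1 << 20, 1000, 512 << 10, 500, -1);
        System.out.println(info.getInputBytes() + " bytes in, "
            + info.getInputRecords() + " records in, "
            + info.getOutputBytes() + " bytes out, "
            + info.getOutputRecords() + " records out");
      }
    }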


      // The run* and specInputRecords declarations below are reconstructed
      // from the assignments that follow; the excerpt begins mid-method.
      long[] runInputBytes = new long[runTasks.length];
      long[] runOutputBytes = new long[runTasks.length];
      int[] runInputRecords = new int[runTasks.length];
      int[] runOutputRecords = new int[runTasks.length];
      long[] specInputRecords = new long[runTasks.length];
      long[] specInputBytes = new long[runTasks.length];
      long[] specOutputRecords = new long[runTasks.length];
      long[] specOutputBytes = new long[runTasks.length];

      for (int i = 0; i < runTasks.length; ++i) {
        final TaskInfo specInfo;
        final Counters counters = runTasks[i].getCounters();
        switch (type) {
          case MAP:
            runInputBytes[i] = counters.findCounter("FileSystemCounters",
                    "HDFS_BYTES_READ").getValue()
                    - counters.findCounter(TaskCounter.SPLIT_RAW_BYTES).getValue();
            runInputRecords[i] = (int) counters.findCounter(
                    TaskCounter.MAP_INPUT_RECORDS).getValue();
            runOutputBytes[i] = counters
                    .findCounter(TaskCounter.MAP_OUTPUT_BYTES).getValue();
            runOutputRecords[i] = (int) counters.findCounter(
                    TaskCounter.MAP_OUTPUT_RECORDS).getValue();

            specInfo = spec.getTaskInfo(TaskType.MAP, i);
            specInputRecords[i] = specInfo.getInputRecords();
            specInputBytes[i] = specInfo.getInputBytes();
            specOutputRecords[i] = specInfo.getOutputRecords();
            specOutputBytes[i] = specInfo.getOutputBytes();

            LOG.info(String.format(type + " SPEC: %9d -> %9d :: %5d -> %5d\n",
                    specInputBytes[i], specOutputBytes[i], specInputRecords[i],
                    specOutputRecords[i]));
            LOG.info(String.format(type + " RUN:  %9d -> %9d :: %5d -> %5d\n",
                    runInputBytes[i], runOutputBytes[i], runInputRecords[i],
                    runOutputRecords[i]));
            break;
          case REDUCE:
            runInputBytes[i] = 0;
            runInputRecords[i] = (int) counters.findCounter(
                    TaskCounter.REDUCE_INPUT_RECORDS).getValue();
            runOutputBytes[i] = counters.findCounter("FileSystemCounters",
                    "HDFS_BYTES_WRITTEN").getValue();
            runOutputRecords[i] = (int) counters.findCounter(
                    TaskCounter.REDUCE_OUTPUT_RECORDS).getValue();

            specInfo = spec.getTaskInfo(TaskType.REDUCE, i);
            // There is no reliable counter for reduce input bytes. The
            // variable-length encoding of intermediate records and other noise
            // make this quantity difficult to estimate. The shuffle and spec
            // input bytes are included in the debug output for reference, but
            // are not checked.
            specInputBytes[i] = 0;
            specInputRecords[i] = specInfo.getInputRecords();
            specOutputRecords[i] = specInfo.getOutputRecords();
            specOutputBytes[i] = specInfo.getOutputBytes();
            LOG.info(String.format(type + " SPEC: (%9d) -> %9d :: %5d -> %5d\n",
                    specInfo.getInputBytes(), specOutputBytes[i],
                    specInputRecords[i], specOutputRecords[i]));
            LOG.info(String.format(type + " RUN:  (%9d) -> %9d :: %5d -> %5d\n",
                    counters.findCounter(TaskCounter.REDUCE_SHUFFLE_BYTES).getValue(),
                    runOutputBytes[i], runInputRecords[i], runOutputRecords[i]));
            break;
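
The excerpt above only logs the SPEC (from TaskInfo) and RUN (from task
counters) values; the enclosing test then asserts that they agree. A
hedged sketch of such a check follows; the helper name and the relative
tolerance are illustrative, not taken from the excerpt:

      // Hypothetical helper: fail when an observed counter strays more
      // than a relative `slack` from the Rumen spec value.
      static void assertWithinSlack(String label, long run, long spec,
          double slack) {
        if (spec > 0 && Math.abs(run - spec) > slack * spec) {
          throw new AssertionError(label + ": run=" + run + " spec=" + spec);
        }
      }

      // e.g. assertWithinSlack("map output bytes", runOutputBytes[i],
      //                        specOutputBytes[i], 0.1);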


      // Excerpt begins mid-method; these two lines close an early-return
      // guard from the omitted top of the method.
      return;
    }
    final int maps = jobdesc.getNumberMaps();
    final int reds = jobdesc.getNumberReduces();
    for (int i = 0; i < maps; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      mapInputBytesTotal += info.getInputBytes();
      mapOutputBytesTotal += info.getOutputBytes();
      mapOutputRecordsTotal += info.getOutputRecords();
    }
    final double[] reduceRecordRatio = new double[reds];
    final double[] reduceByteRatio = new double[reds];
    for (int i = 0; i < reds; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.REDUCE, i);
      reduceByteRatio[i] = info.getInputBytes() / (1.0 * mapOutputBytesTotal);
      reduceRecordRatio[i] =
        info.getInputRecords() / (1.0 * mapOutputRecordsTotal);
    }
    final InputStriper striper = new InputStriper(inputDir, mapInputBytesTotal);
    final List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < maps; ++i) {
      final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
      final long[] specBytes = new long[nSpec];
      final long[] specRecords = new long[nSpec];
      final ResourceUsageMetrics[] metrics = new ResourceUsageMetrics[nSpec];
      for (int j = 0; j < nSpec; ++j) {
        final TaskInfo info =
          jobdesc.getTaskInfo(TaskType.REDUCE, i + j * maps);
        specBytes[j] = info.getOutputBytes();
        specRecords[j] = info.getOutputRecords();
        metrics[j] = info.getResourceUsageMetrics();
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("SPEC(%d) %d -> %d %d %d", id(), i,
                    i + j * maps, info.getOutputRecords(),
                    info.getOutputBytes()));
        }
      }
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      splits.add(
        new LoadSplit(striper.splitFor(inputDir, info.getInputBytes(), 3),
                      maps, i, info.getInputBytes(), info.getInputRecords(),
                      info.getOutputBytes(), info.getOutputRecords(),
                      reduceByteRatio, reduceRecordRatio, specBytes,
                      specRecords, info.getResourceUsageMetrics(),
                      metrics));
    }
    pushDescription(id(), splits);
  }
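
Two details of this split builder are easy to miss: reduceByteRatio and
reduceRecordRatio give each reduce its fractional share of the total map
output, and the inner loop assigns reduce specs to maps round-robin, so
map i carries the specs for reduces i, i + maps, i + 2*maps, and so on.
A standalone sketch of just that indexing (task counts invented):

    public class SpecAssignmentDemo {
      public static void main(String[] args) {
        final int maps = 4, reds = 10; // invented counts
        for (int i = 0; i < maps; ++i) {
          // Same formula as the excerpt: reds/maps specs per map, plus
          // one extra for the first (reds % maps) maps.
          final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
          final StringBuilder sb = new StringBuilder();
          for (int j = 0; j < nSpec; ++j) {
            sb.append(i + j * maps).append(' ');
          }
          System.out.println("map " + i + " -> reduces " + sb.toString().trim());
        }
        // Prints: map 0 -> reduces 0 4 8    map 1 -> reduces 1 5 9
        //         map 2 -> reduces 2 6      map 3 -> reduces 3 7
      }
    }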

      // As in the first excerpt, the run* and specInputRecords declarations
      // are reconstructed from the assignments that follow.
      long[] runInputBytes = new long[runTasks.length];
      long[] runOutputBytes = new long[runTasks.length];
      int[] runInputRecords = new int[runTasks.length];
      int[] runOutputRecords = new int[runTasks.length];
      long[] specInputRecords = new long[runTasks.length];
      long[] specInputBytes = new long[runTasks.length];
      long[] specOutputRecords = new long[runTasks.length];
      long[] specOutputBytes = new long[runTasks.length];

      for (int i = 0; i < runTasks.length; ++i) {
        final TaskInfo specInfo;
        final Counters counters = runTasks[i].getCounters();
        switch (type) {
          case MAP:
            runInputBytes[i] = counters.findCounter("FileSystemCounters",
                "HDFS_BYTES_READ").getValue() -
                counters.findCounter(Task.Counter.SPLIT_RAW_BYTES).getValue();
            runInputRecords[i] =
              (int) counters.findCounter(MAP_INPUT_RECORDS).getValue();
            runOutputBytes[i] =
              counters.findCounter(MAP_OUTPUT_BYTES).getValue();
            runOutputRecords[i] =
              (int) counters.findCounter(MAP_OUTPUT_RECORDS).getValue();

            specInfo = spec.getTaskInfo(TaskType.MAP, i);
            specInputRecords[i] = specInfo.getInputRecords();
            specInputBytes[i] = specInfo.getInputBytes();
            specOutputRecords[i] = specInfo.getOutputRecords();
            specOutputBytes[i] = specInfo.getOutputBytes();
            System.out.printf(type + " SPEC: %9d -> %9d :: %5d -> %5d\n",
                 specInputBytes[i], specOutputBytes[i],
                 specInputRecords[i], specOutputRecords[i]);
            System.out.printf(type + " RUN:  %9d -> %9d :: %5d -> %5d\n",
                 runInputBytes[i], runOutputBytes[i],
                 runInputRecords[i], runOutputRecords[i]);
            break;
          case REDUCE:
            runInputBytes[i] = 0;
            runInputRecords[i] =
              (int)counters.findCounter(REDUCE_INPUT_RECORDS).getValue();
            runOutputBytes[i] =
              counters.findCounter("FileSystemCounters",
                  "HDFS_BYTES_WRITTEN").getValue();
            runOutputRecords[i] =
              (int)counters.findCounter(REDUCE_OUTPUT_RECORDS).getValue();


            specInfo = spec.getTaskInfo(TaskType.REDUCE, i);
            // There is no reliable counter for reduce input bytes. The
            // variable-length encoding of intermediate records and other noise
            // make this quantity difficult to estimate. The shuffle and spec
            // input bytes are included in the debug output for reference, but
            // are not checked.
            specInputBytes[i] = 0;
            specInputRecords[i] = specInfo.getInputRecords();
            specOutputRecords[i] = specInfo.getOutputRecords();
            specOutputBytes[i] = specInfo.getOutputBytes();
            System.out.printf(type + " SPEC: (%9d) -> %9d :: %5d -> %5d\n",
                 specInfo.getInputBytes(), specOutputBytes[i],
                 specInputRecords[i], specOutputRecords[i]);
            System.out.printf(type + " RUN:  (%9d) -> %9d :: %5d -> %5d\n",
                 counters.findCounter(REDUCE_SHUFFLE_BYTES).getValue(),
                 runOutputBytes[i], runInputRecords[i], runOutputRecords[i]);
            break;
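This variant performs the same spec-versus-run comparison against the
older counter API: Task.Counter.SPLIT_RAW_BYTES in place of TaskCounter,
statically imported counter names such as MAP_INPUT_RECORDS, and
System.out.printf instead of the test log. The logic is otherwise
identical to the first excerpt.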

   
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      switch (taskType) {
        case MAP:
          return new TaskInfo(m_bytesIn[taskNumber], m_recsIn[taskNumber],
              m_bytesOut[taskNumber], m_recsOut[taskNumber], -1);
        case REDUCE:
          return new TaskInfo(r_bytesIn[taskNumber], r_recsIn[taskNumber],
              r_bytesOut[taskNumber], r_recsOut[taskNumber], -1);
        default:
          throw new IllegalArgumentException("Not interested");
      }
    }
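
This override fabricates map and reduce specs from parallel arrays; the
trailing -1 leaves the max-memory field unspecified. A usage sketch,
where desc stands in for whatever JobStory-like object carries this
override:

      // Pull the synthetic spec for the third map task.
      TaskInfo map2 = desc.getTaskInfo(TaskType.MAP, 2);
      System.out.println(map2.getInputBytes() + " bytes in, "
          + map2.getOutputRecords() + " records out");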

    public TaskAttemptInfo getTaskAttemptInfo(
      TaskType taskType, int taskNumber, int taskAttemptNumber) {
      switch (taskType) {
        case MAP:
          return new MapTaskAttemptInfo(
            State.SUCCEEDED, new TaskInfo(
              m_bytesIn[taskNumber], m_recsIn[taskNumber],
              m_bytesOut[taskNumber], m_recsOut[taskNumber], -1),100);

        case REDUCE:
          return new ReduceTaskAttemptInfo(
            State.SUCCEEDED, new TaskInfo(
              r_bytesIn[taskNumber], r_recsIn[taskNumber],
              r_bytesOut[taskNumber], r_recsOut[taskNumber], -1),100,100,100);
      }
      throw new UnsupportedOperationException();
    }
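
The trailing numeric arguments are attempt runtimes: one value for the
map attempt and three for the reduce attempt (by one common reading of
these constructors, the shuffle, merge, and reduce phase times, all
hard-coded to 100 here). A hedged usage sketch, with desc again a
stand-in:

      // TaskAttemptInfo exposes getRuntime() on both variants.
      TaskAttemptInfo attempt = desc.getTaskAttemptInfo(TaskType.MAP, 0, 0);
      long runtimeMs = attempt.getRuntime(); // 100 for the map stub above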

      return;
    }
    final int maps = jobdesc.getNumberMaps();
    final int reds = jobdesc.getNumberReduces();
    for (int i = 0; i < maps; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      mapInputBytesTotal += info.getInputBytes();
      mapOutputBytesTotal += info.getOutputBytes();
      mapOutputRecordsTotal += info.getOutputRecords();
    }
    final double[] reduceRecordRatio = new double[reds];
    final double[] reduceByteRatio = new double[reds];
    for (int i = 0; i < reds; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.REDUCE, i);
      reduceByteRatio[i] = info.getInputBytes() / (1.0 * mapOutputBytesTotal);
      reduceRecordRatio[i] =
        info.getInputRecords() / (1.0 * mapOutputRecordsTotal);
    }
    final InputStriper striper = new InputStriper(inputDir, mapInputBytesTotal);
    final List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < maps; ++i) {
      final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
      final long[] specBytes = new long[nSpec];
      final long[] specRecords = new long[nSpec];
      final ResourceUsageMetrics[] metrics = new ResourceUsageMetrics[nSpec];
      for (int j = 0; j < nSpec; ++j) {
        final TaskInfo info =
          jobdesc.getTaskInfo(TaskType.REDUCE, i + j * maps);
        specBytes[j] = info.getOutputBytes();
        specRecords[j] = info.getOutputRecords();
        metrics[j] = info.getResourceUsageMetrics();
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("SPEC(%d) %d -> %d %d %d %d %d %d %d", id(),
              i, i + j * maps, info.getOutputRecords(), info.getOutputBytes(),
              info.getResourceUsageMetrics().getCumulativeCpuUsage(),
              info.getResourceUsageMetrics().getPhysicalMemoryUsage(),
              info.getResourceUsageMetrics().getVirtualMemoryUsage(),
              info.getResourceUsageMetrics().getHeapUsage()));
        }
      }
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      splits.add(new LoadSplit(striper.splitFor(inputDir,
              info.getInputBytes(), 3), maps, i,
            info.getInputBytes(), info.getInputRecords(),
            info.getOutputBytes(), info.getOutputRecords(),
            reduceByteRatio, reduceRecordRatio, specBytes, specRecords,
            info.getResourceUsageMetrics(), metrics));
    }
    pushDescription(id(), splits);
  }
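
This version of the split builder also threads ResourceUsageMetrics from
each reduce spec into the LoadSplit. A sketch of populating a TaskInfo
with metrics, assuming the six-argument constructor that accepts a
ResourceUsageMetrics (the setter names mirror the getters used in the
debug line above); values are invented:

      ResourceUsageMetrics m = new ResourceUsageMetrics();
      m.setCumulativeCpuUsage(2000);        // CPU time
      m.setPhysicalMemoryUsage(128L << 20); // bytes
      m.setVirtualMemoryUsage(512L << 20);
      m.setHeapUsage(64L << 20);
      TaskInfo withMetrics = new TaskInfo(1 << 20, 1000, 512 << 10, 500, -1, m);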

      return;
    }
    final int maps = jobdesc.getNumberMaps();
    final int reds = jobdesc.getNumberReduces();
    for (int i = 0; i < maps; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      mapInputBytesTotal += info.getInputBytes();
      mapOutputBytesTotal += info.getOutputBytes();
      mapOutputRecordsTotal += info.getOutputRecords();
    }
    final double[] reduceRecordRatio = new double[reds];
    final double[] reduceByteRatio = new double[reds];
    for (int i = 0; i < reds; ++i) {
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.REDUCE, i);
      reduceByteRatio[i] = info.getInputBytes() / (1.0 * mapOutputBytesTotal);
      reduceRecordRatio[i] =
        info.getInputRecords() / (1.0 * mapOutputRecordsTotal);
    }
    final InputStriper striper = new InputStriper(inputDir, mapInputBytesTotal);
    final List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < maps; ++i) {
      final int nSpec = reds / maps + ((reds % maps) > i ? 1 : 0);
      final long[] specBytes = new long[nSpec];
      final long[] specRecords = new long[nSpec];
      for (int j = 0; j < nSpec; ++j) {
        final TaskInfo info =
          jobdesc.getTaskInfo(TaskType.REDUCE, i + j * maps);
        specBytes[j] = info.getOutputBytes();
        specRecords[j] = info.getOutputRecords();
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("SPEC(%d) %d -> %d %d %d", id(), i,
              i + j * maps, info.getOutputRecords(), info.getOutputBytes()));
        }
      }
      final TaskInfo info = jobdesc.getTaskInfo(TaskType.MAP, i);
      splits.add(new LoadSplit(striper.splitFor(inputDir,
              info.getInputBytes(), 3), maps, i,
            info.getInputBytes(), info.getInputRecords(),
            info.getOutputBytes(), info.getOutputRecords(),
            reduceByteRatio, reduceRecordRatio, specBytes, specRecords));
    }
    pushDescription(id(), splits);
  }
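
Finally, a worked example of the ratio computation shared by all three
split-builder variants, with invented totals:

      long mapOutputBytesTotal = 1000000L; // summed over all map specs
      long reduce0InputBytes = 250000L;    // reduce 0's spec input bytes
      double ratio = reduce0InputBytes / (1.0 * mapOutputBytesTotal); // 0.25
      // Each map then routes about 25% of its output to reduce 0.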