Examples of Counters


Examples of org.apache.hadoop.mapred.Counters
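This first excerpt, from a job-history performance analyzer, picks up inside the per-key switch for a map task; the matching reduce-task loop appears in full below it. Both rebuild a Counters object from the escaped compact string stored in the job-history file.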

                    case FINISH_TIME:
                        endTime = Long.valueOf(val);
                        break;                   
                    case COUNTERS: {
                        try {
                            Counters counters = Counters.fromEscapedCompactString(val);
                            long rows = counters.getGroup(TASK_COUNTER_GROUP)
                                    .getCounterForName(MAP_INPUT_RECORDS).getCounter();
                            if (rows < minMapRows) minMapRows = rows;
                            if (rows > maxMapRows) maxMapRows = rows;
                        } catch (ParseException e) {
                            LOG.warn("Failed to parse job counters", e);
                        }
                    }
                    break;
                    default:
                        LOG.warn("JobHistory.Keys." + key
                                + " : NOT INCLUDED IN PERFORMANCE ADVISOR MAP COUNTERS");
                        break;
                    }
                }
                duration = endTime - startTime;
                if (minMapTime > duration) minMapTime = duration;
                if (maxMapTime < duration) maxMapTime = duration;
                totalMapTime += duration;       
            } else if (task.get(Keys.TASK_TYPE).equals("REDUCE")) {
                Map<JobHistory.Keys, String> reduceTask = task.getValues();
                Map<JobHistory.Keys, String> successTaskAttemptMap = getLastSuccessfulTaskAttempt(task);
                // NOTE: skipping unsuccessful tasks below means fewer tasks are actually collected in the task list
                if (successTaskAttemptMap != null) {
                    reduceTask.putAll(successTaskAttemptMap);
                } else {
                    LOG.warn("Task:<" + task.get(Keys.TASKID) + "> is not successful - SKIPPING");
                }
                long duration = 0;
                long startTime = 0;
                long endTime = 0;
                int size = reduceTask.size();
                numberReduces++;

                Iterator<Map.Entry<JobHistory.Keys, String>> kv = reduceTask.entrySet().iterator();
                for (int j = 0; j < size; j++) {
                    Map.Entry<JobHistory.Keys, String> rtc = kv.next();
                    JobHistory.Keys key = rtc.getKey();
                    String val = rtc.getValue();
                    switch (key) {
                    case START_TIME:
                        startTime = Long.valueOf(val);
                        break;
                    case FINISH_TIME:
                        endTime = Long.valueOf(val);
                        break;
                    case COUNTERS: {
                        try {
                            Counters counters = Counters.fromEscapedCompactString(val);
                            long rows = counters.getGroup(TASK_COUNTER_GROUP)
                                    .getCounterForName(REDUCE_INPUT_RECORDS).getCounter();
                            if (rows < minReduceRows) minReduceRows = rows;
                            if (rows > maxReduceRows) maxReduceRows = rows;
                        } catch (ParseException e) {
                            LOG.warn("Failed to parse job counters", e);
                        }
                    }
                    break;
                    // ... (rest of the switch and loop omitted in this excerpt)
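The excerpt above rebuilds a Counters object with Counters.fromEscapedCompactString. A minimal standalone sketch of that round trip, using the same old-API calls plus makeEscapedCompactString (the serializing counterpart) and hypothetical group/counter names:

import java.text.ParseException;

import org.apache.hadoop.mapred.Counters;

public class CompactStringRoundTrip {
    public static void main(String[] args) throws ParseException {
        // Build a Counters object and bump a hypothetical counter.
        Counters counters = new Counters();
        counters.incrCounter("MyGroup", "MY_COUNTER", 42L);

        // Serialize to the escaped compact format stored in job-history files.
        String compact = counters.makeEscapedCompactString();

        // Parse it back, exactly as the history-parsing excerpt does.
        Counters parsed = Counters.fromEscapedCompactString(compact);
        long value = parsed.getGroup("MyGroup")
                .getCounterForName("MY_COUNTER").getCounter();
        System.out.println(value); // prints 42
    }
}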

Examples of org.apache.hadoop.mapred.Counters

        JobID mapRedJobID = job.getAssignedJobID();
        RunningJob runningJob = null;
        try {
            runningJob = jobClient.getJob(mapRedJobID);
            if(runningJob != null) {
                Counters counters = runningJob.getCounters();
                if (counters == null) {
                    Long previous = aggMap.get(PigWarning.NULL_COUNTER_COUNT);
                    long nullCounterCount = (previous == null) ? 0 : previous;
                    nullCounterCount++;
                    aggMap.put(PigWarning.NULL_COUNTER_COUNT, nullCounterCount);
                }
                for (Enum e : PigWarning.values()) {
                    if (e != PigWarning.NULL_COUNTER_COUNT) {
                        Long currentCount = aggMap.get(e);
                        currentCount = (currentCount == null ? 0 : currentCount);
                        // If the Counters object is null, the warning
                        // aggregation counts reported to the user may be
                        // inaccurate. Counters should never be null here; that
                        // is a Hadoop bug, and once it is fixed this
                        // null-handling code should never be hit. See PIG-943.
                        if (counters != null)
                            currentCount += counters.getCounter(e);
                        aggMap.put(e, currentCount);
                    }
                }
            }
        } catch (IOException ioe) {
            // ... (exception handling omitted in this excerpt)
        }
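Distilled, the null-tolerant aggregation loop above reduces to the sketch below; Counters.getCounter(Enum) and PigWarning come from the excerpt, while the class and method names are illustrative only.

import java.util.Map;

import org.apache.hadoop.mapred.Counters;
import org.apache.pig.PigWarning;

public class WarningAggregation {
    // Adds one job's warning counters into aggMap, tolerating a null Counters.
    static void aggregate(Counters counters, Map<Enum, Long> aggMap) {
        for (Enum e : PigWarning.values()) {
            if (e == PigWarning.NULL_COUNTER_COUNT) {
                continue; // tracked separately, as in the excerpt above
            }
            Long current = aggMap.get(e);
            long base = (current == null) ? 0L : current;
            // Counters may be null due to the Hadoop bug noted above (PIG-943).
            long delta = (counters == null) ? 0L : counters.getCounter(e);
            aggMap.put(e, base + delta);
        }
    }
}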

Examples of org.apache.hadoop.mapreduce.Counters

  @Test
  public final void testMapperValidValues() throws IOException,
      InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters
        .findCounter(MergeRecordCounter.TOTAL_RECORDS_NEW);
    when(context.getCounter(MergeRecordCounter.TOTAL_RECORDS_NEW))
        .thenReturn(counter);

    MergeKeyMapper mapper = new MergeKeyMapper();
    // ... (rest of the test omitted in this excerpt)
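The mapreduce-API tests in this and the following excerpts hand a real Counter, pulled from a real Counters object, to a mocked context. The handful of Counters/Counter calls they rely on are shown below; the enum is hypothetical.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Counters;

public class CountersBasics {
    // Hypothetical counter enum; the tests use MergeRecordCounter and
    // DedupRecordCounter instead.
    enum MyCounter { RECORDS }

    public static void main(String[] args) {
        Counters counters = new Counters();
        // findCounter creates the counter on first lookup.
        Counter counter = counters.findCounter(MyCounter.RECORDS);
        counter.increment(1);
        System.out.println(counter.getValue()); // prints 1
    }
}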

Examples of org.apache.hadoop.mapreduce.Counters
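(This excerpt resumes mid-statement inside the job setup, then reads the merge counters once the job completes.)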

              .newInstance(job.getOutputFormatClass(),
                  job.getConfiguration()).getClass()
              .getName());
      job.waitForCompletion(false);
      if (job.isComplete()) {
        Counters counters = job.getCounters();
        totalRecordsOld = counters.findCounter(
            MergeRecordCounter.TOTAL_RECORDS_OLD).getValue();
        totalRecordsNew = counters.findCounter(
            MergeRecordCounter.TOTAL_RECORDS_NEW).getValue();
        badRecords = counters.findCounter(
            MergeRecordCounter.BAD_RECORD).getValue();
        output = counters.findCounter(MergeRecordCounter.OUTPUT)
            .getValue();
        logger.info("Total old records read are: " + totalRecordsOld);
        logger.info("Total new records read are: " + totalRecordsNew);
        logger.info("Bad Records are: " + badRecords);
        logger.info("Output records are: " + output);
        // ... (remainder omitted in this excerpt)
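The run-the-job-then-read-its-counters pattern above reduces to the sketch below. The job name and counter enum are placeholders; Job.getInstance is the Hadoop 2 factory (older code uses new Job(conf, name)).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;

public class ReadJobCounters {
    enum MyCounter { OUTPUT } // hypothetical counter enum

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "example");
        // ... configure paths, mapper, reducer, and formats before running ...

        job.waitForCompletion(false);
        if (job.isComplete()) {
            // Counters are fetched once the job has finished.
            Counters counters = job.getCounters();
            long output = counters.findCounter(MyCounter.OUTPUT).getValue();
            System.out.println("Output records: " + output);
        }
    }
}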

Examples of org.apache.hadoop.mapreduce.Counters

public class TestMergeKeyMapper {
  @Test(expected = IOException.class)
  public final void testMapperForNullKeyValue() throws IOException,
      InterruptedException {
    Mapper.Context context = mock(Mapper.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.BAD_RECORD);
    when(context.getCounter(MergeRecordCounter.BAD_RECORD)).thenReturn(
        counter);
    MergeKeyMapper mapper = new MergeKeyMapper();
    Text val = new Text("valueOfKey");
    mapper.map(null, val, context);
  }
}

Examples of org.apache.hadoop.mapreduce.Counters

    values.add(value1);
    values.add(value2);
    values.add(value3);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.OUTPUT);
    when(context.getCounter(DedupRecordCounter.OUTPUT)).thenReturn(counter);
    DedupValueReducer dedupValueReducer = new DedupValueReducer();
    dedupValueReducer.reduce(hihoTuple, values, context);
    verify(context).write(value1, key);
    assertEquals(1, context.getCounter(DedupRecordCounter.OUTPUT)
        .getValue());
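Note the pattern shared by this and the remaining reducer tests: instead of mocking Counter itself, each test pulls a real Counter out of a real Counters object with findCounter, stubs the mocked context's getCounter to return it, and can then assert on the increments the reducer performed.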

Examples of org.apache.hadoop.mapreduce.Counters

    String value1 = null;
    ArrayList<String> values = new ArrayList<String>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.OUTPUT);
    when(context.getCounter(DedupRecordCounter.OUTPUT)).thenReturn(counter);
    DedupValueReducer dedupValueReducer = new DedupValueReducer();
    dedupValueReducer.reduce(hihoTuple, values, context);
    verify(context).write(value1, key);
    assertEquals(1, context.getCounter(DedupRecordCounter.OUTPUT)
        .getValue());

Examples of org.apache.hadoop.mapreduce.Counters

    Text value1 = new Text("value1");
    ArrayList<Text> values = new ArrayList<Text>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.OUTPUT);
    when(context.getCounter(DedupRecordCounter.OUTPUT)).thenReturn(counter);
    DedupValueReducer dedupReducer = new DedupValueReducer();
    dedupReducer.reduce(hihoTuple, values, context);
    verify(context).write(value1, key);
    assertEquals(1, context.getCounter(DedupRecordCounter.OUTPUT)
        .getValue());

Examples of org.apache.hadoop.mapreduce.Counters

    BytesWritable value1 = new BytesWritable("value1".getBytes());
    ArrayList<BytesWritable> values = new ArrayList<BytesWritable>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.OUTPUT);
    when(context.getCounter(DedupRecordCounter.OUTPUT)).thenReturn(counter);
    DedupValueReducer dedupReducer = new DedupValueReducer();
    dedupReducer.reduce(hihoTuple, values, context);
    verify(context).write(value1, key);
    assertEquals(1, context.getCounter(DedupRecordCounter.OUTPUT)
        .getValue());

Examples of org.apache.hadoop.mapreduce.Counters

    IntWritable value1 = new IntWritable(456);
    ArrayList<IntWritable> values = new ArrayList<IntWritable>();
    values.add(value1);

    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(DedupRecordCounter.OUTPUT);
    when(context.getCounter(DedupRecordCounter.OUTPUT)).thenReturn(counter);
    DedupValueReducer dedupReducer = new DedupValueReducer();
    dedupReducer.reduce(hihoTuple, values, context);
    verify(context).write(value1, key);
    assertEquals(1, context.getCounter(DedupRecordCounter.OUTPUT)
        .getValue());