Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.StatusReporter
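
StatusReporter is the abstract hook through which map and reduce code reports counters, progress and status back to the framework. As a rough orientation for the snippets below, here is a minimal in-memory implementation along the lines of the DummyReporter several of them use. This is only a sketch: it assumes the Hadoop 2.x class shape (where getProgress() is also abstract) and simply tracks counters locally.

    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.StatusReporter;

    // Minimal in-memory StatusReporter: counters are tracked locally,
    // progress and status calls have no external effect.
    public class InMemoryStatusReporter extends StatusReporter {
        private final Counters counters = new Counters();
        private volatile String status = "";

        @Override
        public Counter getCounter(Enum<?> name) {
            return counters.findCounter(name);
        }

        @Override
        public Counter getCounter(String group, String name) {
            return counters.findCounter(group, name);
        }

        @Override
        public void progress() {
            // nothing to report outside a real task attempt
        }

        @Override
        public float getProgress() {
            return 0f;
        }

        @Override
        public void setStatus(String status) {
            this.status = status;
        }

        public String getStatus() {
            return status;
        }
    }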


    public static class ErrorCallback {

        @SuppressWarnings("unchecked")
        public static void handleError(EvalFunc evalFunc, Exception e) {
            evalFunc.getLogger().error(e);
            // count the failure against a per-UDF error counter, if a reporter is available
            StatusReporter reporter = PigStatusReporter.getInstance();
            if (reporter != null &&
                    reporter.getCounter(evalFunc.getClass().getName(), e.toString()) != null) {
                reporter.getCounter(evalFunc.getClass().getName(), e.toString()).increment(1L);
            }
        }

        @SuppressWarnings("unchecked")
        public static void handleTimeout(EvalFunc evalFunc, Exception e) {
            evalFunc.getLogger().error(e);
            // count timeouts of a MonitoredUDF under a dedicated counter name
            StatusReporter reporter = PigStatusReporter.getInstance();
            if (reporter != null &&
                    reporter.getCounter(evalFunc.getClass().getName(), "MonitoredUDF Timeout") != null) {
                reporter.getCounter(evalFunc.getClass().getName(), "MonitoredUDF Timeout").increment(1L);
            }
        }
    }
View Full Code Here
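
In both callbacks the counter comes from PigStatusReporter, which extends StatusReporter, and the null checks guard against code running outside a live task, where no reporter or counter is available. A hypothetical helper condensing that pattern (the name and placement are illustrative, not part of Pig):

    // Hypothetical helper: bump a named counter only when a reporter
    // and the counter itself are actually available.
    static void safeIncrement(StatusReporter reporter, String group, String name) {
        if (reporter == null) {
            return;
        }
        Counter counter = reporter.getCounter(group, name);
        if (counter != null) {
            counter.increment(1L);
        }
    }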

    RecordReader<NullWritable, GridmixRecord> reader = new FakeRecordReader();

    LoadRecordGkGrWriter writer = new LoadRecordGkGrWriter();

    OutputCommitter committer = new CustomOutputCommitter();
    // DummyReporter stands in for the task's StatusReporter in this test fixture
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    LoadSplit split = getLoadSplit();

    MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapContext = new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>(
            conf, taskId, reader, writer, committer, reporter, split);
    // context
View Full Code Here

    Counter inputValueCounter = new GenericCounter();
    LoadRecordWriter output = new LoadRecordWriter();

    OutputCommitter committer = new CustomOutputCommitter();

    StatusReporter reporter = new DummyReporter();
    RawComparator<GridmixKey> comparator = new FakeRawComparator();

    ReduceContext<GridmixKey, GridmixRecord, NullWritable, GridmixRecord> reduceContext = new ReduceContextImpl<GridmixKey, GridmixRecord, NullWritable, GridmixRecord>(
            conf, taskid, input, counter, inputValueCounter, output, committer,
            reporter, comparator, GridmixKey.class, GridmixRecord.class);
View Full Code Here

    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    TaskAttemptID taskId = new TaskAttemptID();
    FakeRecordLLReader reader = new FakeRecordLLReader();
    LoadRecordGkNullWriter writer = new LoadRecordGkNullWriter();
    OutputCommitter committer = new CustomOutputCommitter();
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    SleepSplit split = getSleepSplit();
    MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapcontext = new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>(
            conf, taskId, reader, writer, committer, reporter, split);
    Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>()
            .getMapContext(mapcontext);
View Full Code Here
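
The wrapped Context is what user map code sees; counter lookups and progress() calls made through it are delegated to the StatusReporter passed into the MapContextImpl above. A brief illustrative continuation (the counter group and name are made up for the example):

    // Illustrative: calls on the wrapped context are delegated to the
    // StatusReporter ('reporter') supplied when the MapContext was built.
    context.getCounter("test", "records.seen").increment(1L);
    context.progress();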

    Counter inputValueCounter = new GenericCounter();
    RecordWriter<NullWritable, NullWritable> output = new LoadRecordReduceWriter();

    OutputCommitter committer = new CustomOutputCommitter();

    StatusReporter reporter = new DummyReporter();
    RawComparator<GridmixKey> comparator = new FakeRawComparator();

    ReduceContext<GridmixKey, NullWritable, NullWritable, NullWritable> reducecontext = new ReduceContextImpl<GridmixKey, NullWritable, NullWritable, NullWritable>(
            conf, taskId, input, counter, inputValueCounter, output, committer,
            reporter, comparator, GridmixKey.class, NullWritable.class);
View Full Code Here
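
On the reduce side the raw ReduceContext is typically wrapped the same way before being handed to a Reducer. A sketch of that step, assuming Hadoop 2.x's WrappedReducer (this wrapping is not shown in the excerpt above):

    // Sketch: adapt the low-level ReduceContext into the Reducer.Context
    // that user reduce code expects, mirroring the WrappedMapper step earlier.
    Reducer<GridmixKey, NullWritable, NullWritable, NullWritable>.Context reducerContext =
        new WrappedReducer<GridmixKey, NullWritable, NullWritable, NullWritable>()
            .getReducerContext(reducecontext);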

   
    long currentTime = System.currentTimeMillis();
   
    // initialize the matcher class
    TaskAttemptID id = new TaskAttemptID("test", 1, TaskType.MAP, 1, 1);
    StatusReporter reporter = new DummyReporter(progress);
    TaskInputOutputContext context =
      new MapContextImpl(conf, id, null, null, null, reporter, null);
    FakeResourceUsageMatcherRunner matcher =
      new FakeResourceUsageMatcherRunner(context, null);
   
View Full Code Here


   
    long currentTime = System.currentTimeMillis();
   
    // initialize the matcher class
    // older mapreduce API: the TaskAttemptID constructor takes a boolean isMap flag
    // and MapContext is constructed directly rather than via MapContextImpl
    TaskAttemptID id = new TaskAttemptID("test", 1, true, 1, 1);
    StatusReporter reporter = new DummyReporter(progress);
    TaskInputOutputContext context =
      new MapContext(conf, id, null, null, null, reporter, null);
    FakeResourceUsageMatcherRunner matcher =
      new FakeResourceUsageMatcherRunner(context, null);
   
View Full Code Here
