Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.MapContext
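MapContext is the per-task context the new (org.apache.hadoop.mapreduce) API hands to a Mapper: it exposes the job Configuration, the TaskAttemptID, and, unlike the base TaskInputOutputContext, the InputSplit being processed. The excerpts below either construct one directly, to proxy or test mapper code, or cast down to it to interrogate the split.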


  /**
   * ... (javadoc truncated in this excerpt)
   * into our Giraph BSP task code.
   * @param tid the TaskAttemptID to construct this Mapper#Context from.
   * @return sort of a Mapper#Context if you squint just right.
   */
  private Context buildProxyMapperContext(final TaskAttemptID tid) {
    MapContext mc = new MapContextImpl<Object, Object, Object, Object>(
      conf, // our Configuration, populated back at the GiraphYarnClient.
      tid,  // our TaskAttemptId, generated w/YARN app, container, attempt IDs
      null, // RecordReader here will never be used by Giraph
      null, // RecordWriter here will never be used by Giraph
      null, // OutputCommitter here will never be used by Giraph
      null, // StatusReporter, stubbed as null in this excerpt
      null); // InputSplit here will never be used by Giraph
    // ... (the method then adapts this MapContext into the Context it returns)
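In the Hadoop 2.x new API, the usual way to finish what this excerpt starts is to build a MapContextImpl and adapt it into a real Mapper.Context via WrappedMapper. Below is a minimal, self-contained sketch of that pattern, assuming Hadoop 2.x on the classpath; the class and method names (ProxyContextSketch, buildContext) are illustrative, and every I/O member is stubbed with null, so the resulting context is safe only for callers that never touch the reader, writer, committer, reporter, or split:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
import org.apache.hadoop.mapreduce.task.MapContextImpl;

public class ProxyContextSketch {
  // Builds a Mapper.Context whose reader/writer/committer/reporter/split
  // are all null; only the Configuration and TaskAttemptID are real.
  static Mapper<Object, Object, Object, Object>.Context buildContext(
      Configuration conf, TaskAttemptID tid) {
    MapContext<Object, Object, Object, Object> mc =
        new MapContextImpl<Object, Object, Object, Object>(
            conf, tid, null, null, null, null, null);
    // WrappedMapper adapts a MapContext into the Mapper.Context type
    // that user map code expects.
    return new WrappedMapper<Object, Object, Object, Object>()
        .getMapContext(mc);
  }
}

Giraph relies on exactly this looseness: per the comments in the excerpt, only the Configuration and TaskAttemptID need to flow into the BSP task code.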


        processError("===== Task Information Header =====" );

        processError("\nCommand: " + command);
        processError("\nStart time: " + new Date(System.currentTimeMillis()));
        if (job.getBoolean(MRConfiguration.TASK_IS_MAP, false)) {
            MapContext context = (MapContext)PigMapReduce.sJobContext;
            PigSplit pigSplit = (PigSplit)context.getInputSplit();
            int numPaths = pigSplit.getNumPaths();
            processError("\nPigSplit contains " + numPaths + " wrappedSplits.");

            StringBuilder sb = new StringBuilder();
            for(int i = 0; i < numPaths; i++) {
                // ... (loop body elided in this excerpt) ...
            }
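The cast to MapContext here exists only to reach getInputSplit() for the diagnostic header. Below is a Pig-free sketch of the same idea, using nothing beyond the stock InputSplit accessors (the class name SplitSummarySketch is illustrative):

import java.io.IOException;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;

public class SplitSummarySketch {
  // Summarizes the split feeding a map task: class, byte length, hosts.
  static String splitSummary(MapContext<?, ?, ?, ?> ctx)
      throws IOException, InterruptedException {
    InputSplit split = ctx.getInputSplit();
    StringBuilder sb = new StringBuilder();
    sb.append("Split class: ").append(split.getClass().getName());
    sb.append(", length: ").append(split.getLength());
    for (String host : split.getLocations()) {
      sb.append("\n  location: ").append(host);
    }
    return sb.toString();
  }
}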

        processError("===== Task Information Header =====" );

        processError("\nCommand: " + command);
        processError("\nStart time: " + new Date(System.currentTimeMillis()));
        if (job.getBoolean("mapred.task.is.map", false)) {
            MapContext context = (MapContext)PigMapReduce.sJobContext;
            PigSplit pigSplit = (PigSplit)context.getInputSplit();
            int numPaths = pigSplit.getNumPaths();
            processError("\nPigSplit contains " + numPaths + " wrappedSplits.");

            StringBuilder sb = new StringBuilder();
            for(int i = 0; i < numPaths; i++) {
View Full Code Here

   
    // initialize the matcher class
    TaskAttemptID id = new TaskAttemptID("test", 1, true, 1, 1);
    StatusReporter reporter = new DummyReporter(progress);
    TaskInputOutputContext context =
      new MapContext(conf, id, null, null, null, reporter, null);
    FakeResourceUsageMatcherRunner matcher =
      new FakeResourceUsageMatcherRunner(context, null);
   
    // check if the matcher initialized the plugin
    String identifier = TestResourceUsageEmulatorPlugin.DEFAULT_IDENTIFIER;
    // ... (remainder of the test elided) ...
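Two period details are worth flagging: the TaskAttemptID constructor taking a boolean is-map flag is deprecated in favor of TaskType, and in Hadoop 2.x MapContext became an interface, so tests instantiate MapContextImpl instead. Here is a self-contained sketch of a throwaway map context for a test, assuming Hadoop 2.x (the class name TestContextSketch is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.MapContextImpl;

public class TestContextSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // TaskType.MAP replaces the deprecated boolean "isMap" flag.
    TaskAttemptID id = new TaskAttemptID("test", 1, TaskType.MAP, 1, 1);
    MapContext<Object, Object, Object, Object> context =
        new MapContextImpl<Object, Object, Object, Object>(
            conf, id, null, null, null, null, null);
    System.out.println(context.getTaskAttemptID());
  }
}

Passing null for the reader, writer, committer, reporter, and split keeps the harness small; supply a StatusReporter (as the excerpt's DummyReporter does) if the code under test reports progress.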

            RecordWriter.class,
            OutputCommitter.class,
            StatusReporter.class,
            InputSplit.class);
          c.setAccessible(true);
          MapContext mc = (MapContext) c.newInstance(
            outer.getConfiguration(),
            outer.getTaskAttemptID(),
            new SubMapRecordReader(),
            new SubMapRecordWriter(),
            context.getOutputCommitter(),
            new SubMapStatusReporter(),
            outer.getInputSplit());
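Snippets like this use reflection because the concrete Context/MapContext implementation class, and the visibility of its constructor, shifted between Hadoop releases; looking the constructor up at runtime keeps one binary working against several versions. Below is a compilable sketch of the same look-up-and-invoke pattern, aimed at the public Hadoop 2.x MapContextImpl constructor (the class name ReflectiveContextSketch is illustrative):

import java.lang.reflect.Constructor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.StatusReporter;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.MapContextImpl;

public class ReflectiveContextSketch {
  static MapContext<?, ?, ?, ?> newContext(Configuration conf, TaskAttemptID tid)
      throws Exception {
    // Look up the seven-argument constructor by its parameter types,
    // mirroring the getConstructor(...) call in the excerpt above.
    Constructor<MapContextImpl> c = MapContextImpl.class.getConstructor(
        Configuration.class, TaskAttemptID.class, RecordReader.class,
        RecordWriter.class, OutputCommitter.class, StatusReporter.class,
        InputSplit.class);
    // No-op for this public constructor; required when the target is not.
    c.setAccessible(true);
    return c.newInstance(conf, tid, null, null, null, null, null);
  }
}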


        processError("===== Task Information Header =====" );

        processError("\nCommand: " + command);
        processError("\nStart time: " + new Date(System.currentTimeMillis()));
        if (job.getBoolean("mapred.task.is.map", false)) {
            MapContext context = (MapContext)PigMapReduce.sJobContext;
            PigSplit pigSplit = (PigSplit)context.getInputSplit();
            InputSplit wrappedSplit = pigSplit.getWrappedSplit();
            if (wrappedSplit instanceof FileSplit) {
                FileSplit mapInputFileSplit = (FileSplit)wrappedSplit;
                processError("\nInput-split file: " +
                             mapInputFileSplit.getPath().toString());
                // ... (more split details elided) ...
            }
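When the wrapped split is file-based, FileSplit adds the path, start offset, and byte length on top of the generic InputSplit surface. A minimal sketch producing a similar diagnostic line from any MapContext (the class name InputFileSketch is illustrative):

import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

public class InputFileSketch {
  // Reports which file region a map task is consuming, when the split
  // is file-based; other split types fall through to a generic message.
  static String describe(MapContext<?, ?, ?, ?> ctx) {
    InputSplit split = ctx.getInputSplit();
    if (split instanceof FileSplit) {
      FileSplit fs = (FileSplit) split;
      return "Input-split file: " + fs.getPath()
          + " start: " + fs.getStart()
          + " length: " + fs.getLength();
    }
    return "Non-file split: " + split.getClass().getName();
  }
}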

