Package: org.apache.flink.hadoopcompatibility.mapred.wrapper

Usage examples of the class org.apache.flink.hadoopcompatibility.mapred.wrapper.HadoopDummyReporter


  @Override
  public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    this.reducer.configure(jobConf);
   
    this.reporter = new HadoopDummyReporter();
    this.reduceCollector = new HadoopOutputCollector<KEYOUT, VALUEOUT>();
    Class<KEYIN> inKeyClass = (Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
    this.valueIterator = new HadoopTupleUnwrappingIterator<KEYIN, VALUEIN>(inKeyClass);
  }
View Full Code Here


  public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    this.reducer.configure(jobConf);
    this.combiner.configure(jobConf);
   
    this.reporter = new HadoopDummyReporter();
    Class<KEYIN> inKeyClass = (Class<KEYIN>) TypeExtractor.getParameterType(Reducer.class, reducer.getClass(), 0);
    this.valueIterator = new HadoopTupleUnwrappingIterator<KEYIN, VALUEIN>(inKeyClass);
    this.combineCollector = new HadoopOutputCollector<KEYIN, VALUEIN>();
    this.reduceCollector = new HadoopOutputCollector<KEYOUT, VALUEOUT>();
  }
View Full Code Here

  @Override
  public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    this.mapper.configure(jobConf);
   
    this.reporter = new HadoopDummyReporter();
    this.outputCollector = new HadoopOutputCollector<KEYOUT, VALUEOUT>();
  }
View Full Code Here

   * commit the task by moving the output file out from the temporary directory.
   * @throws IOException
   */
  @Override
  public void close() throws IOException {
    this.recordWriter.close(new HadoopDummyReporter());
   
    if (this.fileOutputCommitter.needsTaskCommit(this.context)) {
      this.fileOutputCommitter.commitTask(this.context);
    }
    this.fileOutputCommitter.commitJob(this.jobContext);
View Full Code Here

    return new DefaultInputSplitAssigner(inputSplits);
  }

  @Override
  public void open(HadoopInputSplit split) throws IOException {
    this.recordReader = this.hadoopInputFormat.getRecordReader(split.getHadoopInputSplit(), jobConf, new HadoopDummyReporter());
    key = this.recordReader.createKey();
    value = this.recordReader.createValue();
    this.fetched = false;
  }
View Full Code Here

    return new DefaultInputSplitAssigner(inputSplits);
  }
 
  @Override
  public void open(HadoopInputSplit split) throws IOException {
    this.recordReader = this.mapredInputFormat.getRecordReader(split.getHadoopInputSplit(), jobConf, new HadoopDummyReporter());
    if (this.recordReader instanceof Configurable) {
      ((Configurable) this.recordReader).setConf(jobConf);
    }
    key = this.recordReader.createKey();
    value = this.recordReader.createValue();
View Full Code Here

   * commit the task by moving the output file out from the temporary directory.
   * @throws IOException
   */
  @Override
  public void close() throws IOException {
    this.recordWriter.close(new HadoopDummyReporter());
    if (this.fileOutputCommitterWrapper.needsTaskCommit(this.jobConf, TaskAttemptID.forName(this.jobConf.get("mapred.task.id")))) {
      this.fileOutputCommitterWrapper.commitTask(this.jobConf, TaskAttemptID.forName(this.jobConf.get("mapred.task.id")));
    }
  //TODO: commitjob when all the tasks are finished
  }
View Full Code Here

TOP

Related Classes of org.apache.flink.hadoopcompatibility.mapred.wrapper.HadoopDummyReporter

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware@gmail.com.