Package org.apache.hadoop.mrunit.internal.io

Examples of org.apache.hadoop.mrunit.internal.io.Serialization
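These snippets come from MRUnit's driver internals. Serialization wraps the serializers registered in a Hadoop Configuration and is used to make deep copies of keys and values. A minimal sketch of the basic usage, assuming a Writable type such as Text; the example class is illustrative, but the constructor and copy(...) call mirror the snippets below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.internal.io.Serialization;

public class SerializationCopyExample {
  public static void main(String[] args) {
    // The Configuration decides which Hadoop serializer is used
    // (WritableSerialization covers Text by default).
    Serialization serialization = new Serialization(new Configuration());

    Text original = new Text("hello");
    // copy() round-trips the object through serialize/deserialize,
    // yielding an independent instance.
    Text copy = serialization.copy(original);

    original.set("changed");
    System.out.println(copy);  // still prints "hello"
  }
}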


    return outputs;
  }

  // Lazily create the Serialization helper from the driver's Configuration
  // on first use, then cache it for later copies.
  private Serialization getSerialization() {
    if (serialization == null) {
      serialization = new Serialization(getConfiguration());
    }
    return serialization;
  }


      // Finish writing before reading the output back through the InputFormat.
      recordWriter.close(taskAttemptContext);
    } catch (InterruptedException e) {
      throw new IOException(e);
    }

    // Re-read everything that was written, deep-copying each record.
    final Serialization serialization = new Serialization(
        taskAttemptContext.getConfiguration());
    try {
      List<InputSplit> inputSplits = inputFormat.getSplits(taskAttemptContext);
      for (InputSplit inputSplit : inputSplits) {
        RecordReader<K, V> recordReader = inputFormat.createRecordReader(
            inputSplit, taskAttemptContext);
        recordReader.initialize(inputSplit, taskAttemptContext);
        while (recordReader.nextKeyValue()) {
          // RecordReaders may reuse the same key/value instances between
          // calls to nextKeyValue(), so store independent copies.
          outputs.add(new Pair<K, V>(serialization.copy(recordReader
              .getCurrentKey()), serialization.copy(recordReader
              .getCurrentValue())));
        }
      }
    } catch (InterruptedException e) {
      throw new IOException(e);
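The records are copied because RecordReader implementations typically reuse the same key and value objects on every call to nextKeyValue(); storing the raw references would leave every Pair pointing at the last record read. A small sketch of that pitfall, assuming Text values; the reused container here is simulated rather than taken from a real reader:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.internal.io.Serialization;

public class ReaderReuseDemo {
  public static void main(String[] args) {
    Serialization serialization = new Serialization(new Configuration());
    Text shared = new Text();  // stands in for a reader's reused container
    List<Text> references = new ArrayList<Text>();
    List<Text> copies = new ArrayList<Text>();

    for (String record : new String[] {"a", "b", "c"}) {
      shared.set(record);                      // the "reader" overwrites the same object
      references.add(shared);                  // every element is the same instance
      copies.add(serialization.copy(shared));  // each element is an independent copy
    }

    System.out.println(references);  // [c, c, c]
    System.out.println(copies);      // [a, b, c]
  }
}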

  private final V valueContainer;
  private final Serialization serialization;

  // Holds reusable key and value container instances together with the
  // Serialization helper used to copy records.
  public KeyValueReuseList(K keyContainer, V valueContainer, Configuration conf) {
    super();
    serialization = new Serialization(conf);
    this.keyContainer = keyContainer;
    this.valueContainer = valueContainer;
  }
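The keyContainer and valueContainer passed to the constructor are reusable instances for the records the list manages. A hedged sketch of that container-reuse idea follows; it assumes Serialization also offers a two-argument copy(orig, copy) overload that copies into an existing object, which is an assumption here and not shown in the snippet above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.internal.io.Serialization;

public class ContainerReuseSketch {
  public static void main(String[] args) {
    Serialization serialization = new Serialization(new Configuration());

    // Reusable containers, analogous to keyContainer / valueContainer above.
    LongWritable keyContainer = new LongWritable();
    Text valueContainer = new Text();

    LongWritable someKey = new LongWritable(42L);
    Text someValue = new Text("record");

    // Assumed two-argument overload: copies 'orig' into the supplied
    // container instead of allocating a new object on every record.
    LongWritable keyCopy = serialization.copy(someKey, keyContainer);
    Text valueCopy = serialization.copy(someValue, valueContainer);

    System.out.println(keyCopy + "\t" + valueCopy);
  }
}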



