Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
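
The snippets below are excerpts from open-source test code: the ChunkInputFormat examples appear to come from Apache Accumulo, and the rest from Apache Avro's MapReduce integration tests. Together they show the two usual ways to get hold of a TaskAttemptContext in a test: building one through a compatibility ContextFactory, or mocking one with EasyMock.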


    // Excerpt (Apache Accumulo test): read the key side of ChunkInputFormat
    // (a chunk's metadata entries) from a mock instance.
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance1");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> rr = cif.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
   
    assertTrue(rr.nextKeyValue());
    List<Entry<Key,Value>> info = rr.getCurrentKey();


    // Excerpt (Apache Accumulo test): read the value side of ChunkInputFormat
    // (a chunk's contents as an InputStream) from a mock instance.
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance2");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
    crr.initialize(ris, tac);
   
    assertTrue(crr.nextKeyValue());
    InputStream cis = crr.getCurrentValue();

    // Excerpt (Apache Accumulo test): the same setup against a third mock
    // instance, again reading the chunk metadata key.
    JobContext job = ContextFactory.createJobContext();
    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance3");
    ChunkInputFormat cif = new ChunkInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
    crr.initialize(ris, tac);
   
    assertTrue(crr.nextKeyValue());
    List<Entry<Key,Value>> info = crr.getCurrentKey();
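
All three excerpts follow the standard RecordReader contract: initialize the reader, then alternate nextKeyValue() with getCurrentKey()/getCurrentValue(). A minimal sketch of a complete read loop under that contract (the loop itself is not part of the original tests):

    // Hypothetical sketch: drain a ChunkInputFormat reader. The key carries
    // the chunk's metadata entries; the value streams the chunk's contents.
    while (crr.nextKeyValue()) {
      List<Entry<Key,Value>> info = crr.getCurrentKey();
      InputStream data = crr.getCurrentValue();
      // ... read the chunk contents from data ...
    }
    crr.close();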

    // Configure a mock task attempt context.
    Job job = new Job(conf);
    job.getConfiguration().set("mapred.output.dir", mTempDir.getRoot().getPath());
    Schema writerSchema = Schema.create(Schema.Type.INT);
    AvroJob.setOutputKeySchema(job, writerSchema);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);
    expect(context.getConfiguration())
        .andReturn(job.getConfiguration()).anyTimes();
    expect(context.getTaskAttemptID())
        .andReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"))
        .anyTimes();
    expect(context.getNumReduceTasks()).andReturn(1);

    // Create a mock record writer.
    @SuppressWarnings("unchecked")
    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter
        = createMock(RecordWriter.class);
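
EasyMock mocks only honor their recorded expectations after replay(), and a test normally ends by checking them with verify(). A hedged sketch of that remaining ceremony (how the mocks are handed to the code under test is an assumption; the excerpt stops before this point):

    // Hypothetical continuation: switch the mocks from record to replay mode,
    // exercise the code under test, then confirm every expectation was met.
    replay(context);
    replay(expectedRecordWriter);
    // ... pass `context` to the OutputFormat under test ...
    verify(context);
    verify(expectedRecordWriter);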

// Excerpt (Apache Avro test): write Avro key/value records through a mocked
// TaskAttemptContext.
public class TestAvroKeyValueRecordWriter {
  @Test
  public void testWriteRecords() throws IOException {
    Job job = new Job();
    AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);

    replay(context);

    AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
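
The excerpt stops after the key converter; the value side typically comes from the same factory. A hedged sketch of that next step (this continuation is an assumption, not shown in the excerpt; the unchecked cast works around generic erasure):

    // Hypothetical continuation: build a converter for the Avro value side.
    AvroValue<TextStats> avroValue = new AvroValue<TextStats>(null);
    @SuppressWarnings("unchecked")
    AvroDatumConverter<AvroValue<TextStats>, ?> valueConverter =
        factory.create((Class<AvroValue<TextStats>>) avroValue.getClass());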

  }
  @Test public void testUsingReflection() throws Exception {
    Job job = new Job();
    Schema schema = ReflectData.get().getSchema(R1.class);
    AvroJob.setOutputValueSchema(job, schema);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);
    replay(context);

    R1 record = new R1();
    record.attribute = "test";
    AvroValue<R1> avroValue = new AvroValue<R1>(record);

  @Test
  public void testSyncableWriteRecords() throws IOException {
    Job job = new Job();
    AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);

    replay(context);

    AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);

    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();

    // Create a mock task attempt context for this record reader.
    TaskAttemptContext context = createMock(TaskAttemptContext.class);
    expect(context.getConfiguration()).andReturn(conf).anyTimes();

    // Initialize the record reader.
    replay(inputSplit);
    replay(context);
    recordReader.initialize(inputSplit, context);
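
With the mocks replayed and initialize() done, the reader is driven through the usual RecordReader loop. A minimal sketch (key/value types are omitted because the excerpt does not show how recordReader was constructed):

    // Hypothetical continuation: iterate the reader, then release resources.
    while (recordReader.nextKeyValue()) {
      // ... use recordReader.getCurrentKey() / getCurrentValue() ...
    }
    recordReader.close();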

    checkBaseOutputPath(baseOutputPath);
    if (!namedOutputs.contains(namedOutput)) {
      throw new IllegalArgumentException("Undefined named output '" +
        namedOutput + "'");
    }
    TaskAttemptContext taskContext = getContext(namedOutput);
    getRecordWriter(taskContext, baseOutputPath).write(key, value);
  }
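
For context, this is the body of MultipleOutputs.write(String, K, V, String): it validates the named output, resolves a per-named-output TaskAttemptContext, and obtains a RecordWriter for it. A minimal caller-side sketch, assuming a named output registered as "text" (all names here are illustrative):

    // Hypothetical job setup: register the named output once.
    MultipleOutputs.addNamedOutput(job, "text", TextOutputFormat.class,
        Text.class, IntWritable.class);

    // Hypothetical reducer usage: write to the named output under a custom
    // base output path instead of the default part-r-* files.
    mos.write("text", key, value, "text/part");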
