Class org.apache.hadoop.mapreduce.Mapper

Examples of org.apache.hadoop.mapreduce.Mapper.Context


  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Test
  public void testKeyValueImporter() throws Exception {
    KeyValueImporter importer = new KeyValueImporter();
    Configuration configuration = new Configuration();
    Context ctx = mock(Context.class);
    when(ctx.getConfiguration()).thenReturn(configuration);

    doAnswer(new Answer<Void>() {

      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
View Full Code Here
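
The snippet above is cut off at the doAnswer stub. For reference, here is a minimal self-contained sketch of the same mocking pattern. It assumes Mockito on the classpath; the WordCopyMapper class below is a hypothetical stand-in for KeyValueImporter, used only to show how a mocked Mapper.Context can return a Configuration and capture writes.

import static org.mockito.Mockito.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class MapperContextMockSketch {

  /** Hypothetical mapper, standing in for the class under test. */
  static class WordCopyMapper extends Mapper<Text, Text, Text, Text> {
    @Override
    protected void map(Text key, Text value, Context context)
        throws IOException, InterruptedException {
      context.write(key, value);
    }
  }

  @SuppressWarnings({ "unchecked", "rawtypes" })
  public static void main(String[] args) throws Exception {
    WordCopyMapper mapper = new WordCopyMapper();
    Configuration configuration = new Configuration();

    // Mapper.Context is abstract, so Mockito can subclass it; no real
    // MapReduce task is needed to exercise map().
    Mapper.Context ctx = mock(Mapper.Context.class);
    when(ctx.getConfiguration()).thenReturn(configuration);

    // Capture everything the mapper writes instead of sending it anywhere.
    final List<String> written = new ArrayList<String>();
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        Object[] args = invocation.getArguments();
        written.add(args[0] + "\t" + args[1]);
        return null;
      }
    }).when(ctx).write(any(), any());

    mapper.map(new Text("k1"), new Text("v1"), ctx);
    System.out.println(written);   // prints [k1\tv1]
  }
}

Mocking keeps the test free of MapContextImpl wiring; the WrappedMapper examples further down show the alternative of building a real Context.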


    LoadSplit split = getLoadSplit();

    MapContext<NullWritable, GridmixRecord, GridmixKey, GridmixRecord> mapcontext = new MapContextImpl<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>(
            conf, taskid, reader, writer, committer, reporter, split);
    // context
    Context ctxt = new WrappedMapper<NullWritable, GridmixRecord, GridmixKey, GridmixRecord>()
            .getMapContext(mapcontext);

    reader.initialize(split, ctxt);
    ctxt.getConfiguration().setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    CompressionEmulationUtil.setCompressionEmulationEnabled(
            ctxt.getConfiguration(), true);

    LoadJob.LoadMapper mapper = new LoadJob.LoadMapper();
    // setup, map, clean
    mapper.run(ctxt);
View Full Code Here
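
Mocking is not enough once the code under test goes through Mapper.run() or touches the committer and reporter, so the Gridmix tests above build a real Context instead: a MapContextImpl wired with a reader, writer, committer, reporter and split, then wrapped via WrappedMapper.getMapContext(). Below is a self-contained sketch of that wiring under simplified assumptions: it drives the default (identity) Mapper, the ListRecordReader and PrintingRecordWriter stubs are hypothetical helpers written for this example, and NullOutputFormat's no-op committer replaces the CustomOutputCommitter used by the Gridmix test.

import java.util.Arrays;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class RealContextSketch {

  /** Hypothetical reader that feeds two fixed (Text, Text) records. */
  static class ListRecordReader extends RecordReader<Text, Text> {
    private final Iterator<String[]> records = Arrays.asList(
        new String[] { "k1", "v1" }, new String[] { "k2", "v2" }).iterator();
    private Text key, value;

    @Override public void initialize(InputSplit split, TaskAttemptContext ctx) { }
    @Override public boolean nextKeyValue() {
      if (!records.hasNext()) {
        return false;
      }
      String[] kv = records.next();
      key = new Text(kv[0]);
      value = new Text(kv[1]);
      return true;
    }
    @Override public Text getCurrentKey() { return key; }
    @Override public Text getCurrentValue() { return value; }
    @Override public float getProgress() { return 0f; }
    @Override public void close() { }
  }

  /** Hypothetical writer that just prints what the mapper emits. */
  static class PrintingRecordWriter extends RecordWriter<Text, Text> {
    @Override public void write(Text key, Text value) {
      System.out.println(key + "\t" + value);
    }
    @Override public void close(TaskAttemptContext ctx) { }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    TaskAttemptID taskId = new TaskAttemptID();
    InputSplit split = new FileSplit(new Path("/tmp/ignored"), 0, 0, new String[0]);
    ListRecordReader reader = new ListRecordReader();

    // No-op committer; the Gridmix test plugs in its own CustomOutputCommitter here.
    OutputCommitter committer = new NullOutputFormat<Text, Text>()
        .getOutputCommitter(new TaskAttemptContextImpl(conf, taskId));

    // Wire a real MapContext, then wrap it so Mapper.run() accepts it as a Context.
    MapContext<Text, Text, Text, Text> mapContext =
        new MapContextImpl<Text, Text, Text, Text>(conf, taskId, reader,
            new PrintingRecordWriter(), committer,
            new TaskAttemptContextImpl.DummyReporter(), split);
    Mapper<Text, Text, Text, Text>.Context context =
        new WrappedMapper<Text, Text, Text, Text>().getMapContext(mapContext);

    reader.initialize(split, context);

    // run() = setup(), one map() call per record, cleanup(); the default Mapper
    // is the identity, so both records reach PrintingRecordWriter.
    new Mapper<Text, Text, Text, Text>().run(context);
  }
}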

    OutputCommitter committer = new CustomOutputCommitter();
    StatusReporter reporter = new TaskAttemptContextImpl.DummyReporter();
    SleepSplit split = getSleepSplit();
    MapContext<LongWritable, LongWritable, GridmixKey, NullWritable> mapcontext = new MapContextImpl<LongWritable, LongWritable, GridmixKey, NullWritable>(
            conf, taskid, reader, writer, committer, reporter, split);
    Context context = new WrappedMapper<LongWritable, LongWritable, GridmixKey, NullWritable>()
            .getMapContext(mapcontext);

    long start = System.currentTimeMillis();
    LOG.info("start:" + start);
    LongWritable key = new LongWritable(start + 2000);
View Full Code Here

    // Setup the conf
    GiraphConfiguration tmpConf = new GiraphConfiguration();
    GiraphConstants.COMPUTATION_CLASS.set(tmpConf, IntNoOpComputation.class);
    conf = new ImmutableClassesGiraphConfiguration(tmpConf);

    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    // Start the service
    serverData = MockUtils.createNewServerData(conf, context);
    serverData.prepareSuperstep();
    workerInfo = new WorkerInfo();
View Full Code Here

   *
   * @throws IOException
   */
  @Test
  public void connectSingleClientServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    WorkerInfo workerInfo = new WorkerInfo();
    NettyServer server =
View Full Code Here

   *
   * @throws IOException
   */
  @Test
  public void connectOneClientToThreeServers() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    RequestServerHandler.Factory requestServerHandlerFactory =
        new WorkerRequestServerHandler.Factory(serverData);
View Full Code Here

   *
   * @throws IOException
   */
  @Test
  public void connectThreeClientsToOneServer() throws IOException {
    @SuppressWarnings("rawtypes")
    Context context = mock(Context.class);
    when(context.getConfiguration()).thenReturn(conf);

    ServerData<IntWritable, IntWritable, IntWritable> serverData =
        MockUtils.createNewServerData(conf, context);
    WorkerInfo workerInfo = new WorkerInfo();
    NettyServer server = new NettyServer(conf,
View Full Code Here

    /** Output stream to dump the strings. */
    private DataOutputStream out;

    @Override
    public void preApplication() {
      Context context = getContext();
      FileSystem fs;

      try {
        fs = FileSystem.get(context.getConfiguration());

        String p = context.getConfiguration()
            .get(SimpleVertexWithWorkerContext.OUTPUTDIR);
        if (p == null) {
          throw new IllegalArgumentException(
              SimpleVertexWithWorkerContext.OUTPUTDIR +
              " undefined!");
        }

        Path path = new Path(p);
        if (!fs.exists(path)) {
          throw new IllegalArgumentException(path +
              " doesn't exist");
        }

        Path outF = new Path(path, FILENAME +
            context.getTaskAttemptID());
        if (fs.exists(outF)) {
          throw new IllegalArgumentException(outF +
              " aready exists");
        }

View Full Code Here
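
The worker context above reads its output directory from the Configuration exposed by the underlying Mapper.Context and fails fast when it is missing. The same lookup-and-validate idiom is common inside a plain Mapper: fetch the setting once in setup() and reuse it in map(). A minimal sketch follows; the PrefixMapper class and the example.prefix key are hypothetical and exist only to illustrate the pattern.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class PrefixMapper extends Mapper<LongWritable, Text, Text, Text> {

  /** Hypothetical configuration key, set on the Job before submission. */
  public static final String PREFIX_KEY = "example.prefix";

  private String prefix;

  @Override
  protected void setup(Context context) {
    // Fail fast if the job was submitted without the required setting,
    // mirroring the OUTPUTDIR check in the worker-context example above.
    prefix = context.getConfiguration().get(PREFIX_KEY);
    if (prefix == null) {
      throw new IllegalArgumentException(PREFIX_KEY + " undefined!");
    }
  }

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    context.write(new Text(prefix), value);
  }
}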


