Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.OutputCommitter
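
The snippets below exercise the OutputCommitter lifecycle: job setup, per-task commit or abort, and final job commit or abort. For orientation, here is a minimal sketch of the five abstract methods a concrete committer must implement; the no-op bodies are illustrative and not taken from any example below.

    import java.io.IOException;

    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    // Minimal no-op committer showing the abstract lifecycle methods.
    public class NoOpOutputCommitter extends OutputCommitter {
        @Override
        public void setupJob(JobContext jobContext) throws IOException {
            // Runs once before any task, e.g. to create a temporary output directory.
        }

        @Override
        public void setupTask(TaskAttemptContext taskContext) throws IOException {
            // Runs before each task attempt writes output.
        }

        @Override
        public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
            return false; // no output to promote, so commitTask() is skipped
        }

        @Override
        public void commitTask(TaskAttemptContext taskContext) throws IOException {
            // Promote the attempt's output to the task's final location.
        }

        @Override
        public void abortTask(TaskAttemptContext taskContext) throws IOException {
            // Discard the attempt's partial output.
        }
    }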


  @Test
  public void testCommitJobFailsJob() {

    JobImpl mockJob = mock(JobImpl.class);
    mockJob.tasks = new HashMap<TaskId, Task>();
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    EventHandler mockEventHandler = mock(EventHandler.class);
    JobContext mockJobContext = mock(JobContext.class);

    when(mockJob.getCommitter()).thenReturn(mockCommitter);
    when(mockJob.getEventHandler()).thenReturn(mockEventHandler);
    // ... remainder of the test truncated in the original listing
  }
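
Given the test's name, the truncated portion presumably forces the commit to fail and asserts that the job fails as a result. A hedged sketch of that stubbing (the exception message is illustrative, and the original test's assertions are not shown):

    // Illustrative: make the committer throw so the job-commit path errors out.
    try {
      doThrow(new IOException("forced commit failure"))
          .when(mockCommitter).commitJob(any(JobContext.class));
    } catch (IOException e) {
      // cannot happen while stubbing the mock
    }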


  @Test
  public void testCheckJobCompleteSuccess() {
   
    JobImpl mockJob = mock(JobImpl.class);
    mockJob.tasks = new HashMap<TaskId, Task>();
    OutputCommitter mockCommitter = mock(OutputCommitter.class);
    EventHandler mockEventHandler = mock(EventHandler.class);
    JobContext mockJobContext = mock(JobContext.class);
   
    when(mockJob.getCommitter()).thenReturn(mockCommitter);
    when(mockJob.getEventHandler()).thenReturn(mockEventHandler);
    // ... remainder of the test truncated in the original listing
  }

    tasks.put(mockTask.getID(), mockTask);
    mockJob.tasks = tasks;
   
    try {
      // Stub these calls in case the code under test unexpectedly reaches them
      OutputCommitter mockCommitter = mock(OutputCommitter.class);
      EventHandler mockEventHandler = mock(EventHandler.class);
      doNothing().when(mockCommitter).commitJob(any(JobContext.class));
      doNothing().when(mockEventHandler).handle(any(JobHistoryEvent.class));
    } catch (IOException e) {
      e.printStackTrace();   
    }
    // ... remainder of the test truncated in the original listing
  }

    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
    TaskAttemptListener taListener = mock(TaskAttemptListener.class);
    Path jobFile = mock(Path.class);
    JobConf jobConf = new JobConf();
    OutputCommitter outputCommitter = mock(OutputCommitter.class);
    TaskAttemptImpl taImpl =
        new MapTaskAttemptImpl(taskId, 1, eventHandler, jobFile, 1,
            taskSplitMetaInfo, jobConf, taListener, outputCommitter, null,
            null, clock, null);
    return taImpl;
  }


    private static class ImporterOutputFormat extends HFileOutputFormat {
        @Override
        public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
            final OutputCommitter baseOutputCommitter = super.getOutputCommitter(context);

            return new OutputCommitter() {
                @Override
                public void setupJob(JobContext jobContext) throws IOException {
                    baseOutputCommitter.setupJob(jobContext);
                }

                @Override
                public void setupTask(TaskAttemptContext taskContext) throws IOException {
                    baseOutputCommitter.setupTask(taskContext);
                }

                @Override
                public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
                    return baseOutputCommitter.needsTaskCommit(taskContext);
                }

                @Override
                public void commitTask(TaskAttemptContext taskContext) throws IOException {
                    baseOutputCommitter.commitTask(taskContext);
                }

                @Override
                public void abortTask(TaskAttemptContext taskContext) throws IOException {
                    baseOutputCommitter.abortTask(taskContext);
                }

                @Override
                public void abortJob(JobContext jobContext, JobStatus.State state) throws IOException {
                    try {
                        baseOutputCommitter.abortJob(jobContext, state);
                    } finally {
                        cleanupScratch(jobContext);
                    }
                }

                @Override
                public void commitJob(JobContext jobContext) throws IOException {
                    try {
                        baseOutputCommitter.commitJob(jobContext);
                        Configuration conf = jobContext.getConfiguration();
                        try {
                            // bulk-load the generated HFiles into the target table
                            new LoadIncrementalHFiles(conf)
                                .doBulkLoad(HFileOutputFormat.getOutputPath(jobContext),
                                    new HTable(conf,
                                        conf.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY)));
                        } catch (Exception e) {
                            throw new IOException("BulkLoad failed.", e);
                        }
                    } finally {
                        cleanupScratch(jobContext);
                    }
                }

                @Override
                public void cleanupJob(JobContext context) throws IOException {
                    try {
                        baseOutputCommitter.cleanupJob(context);
                    } finally {
                        cleanupScratch(context);
                    }
                }
            };
        }
    }
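
The cleanupScratch helper invoked above is not part of this excerpt. A plausible sketch, assuming the scratch data lives under the job's configured output path; the helper's name comes from the excerpt, but this body is hypothetical:

    // Hypothetical body for the cleanupScratch(JobContext) helper referenced above;
    // assumes the scratch directory is the job's output path and may be deleted recursively.
    private void cleanupScratch(JobContext context) throws IOException {
        Path outputPath = HFileOutputFormat.getOutputPath(context);
        if (outputPath != null) {
            FileSystem fs = outputPath.getFileSystem(context.getConfiguration());
            fs.delete(outputPath, true); // recursive delete of the scratch dir
        }
    }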

    private static class NullOutputFormat<K, V> extends
        org.apache.hadoop.mapreduce.lib.output.NullOutputFormat<K, V> {

        @Override
        public OutputCommitter getOutputCommitter(TaskAttemptContext context) {
            return new OutputCommitter() {
                public void abortTask(TaskAttemptContext taskContext) {
                }

                public void cleanupJob(JobContext jobContext) {
                }
                // ... remaining no-op lifecycle methods truncated in the original listing
            };
        }
    }

        publishTest(job);
    }

    public void publishTest(Job job) throws Exception {
        OutputCommitter committer = new FileOutputCommitterContainer(job, null);
        committer.commitJob(job);

        Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
        assertNotNull(part);

        StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(), part.getParameters());
        // ... remainder truncated in the original listing
    }

            outputCommitters = new LinkedHashMap<String, MultiOutputFormat.BaseOutputCommitterContainer>();
            String[] aliases = getOutputFormatAliases(context);
            for (String alias : aliases) {
                LOGGER.info("Creating output committer for alias: " + alias);
                TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
                OutputCommitter baseCommitter = getOutputFormatInstance(aliasContext)
                        .getOutputCommitter(aliasContext);
                outputCommitters.put(alias,
                        new BaseOutputCommitterContainer(baseCommitter, aliasContext));
            }
        }

        @Override
        public void commitTask(TaskAttemptContext taskContext) throws IOException {
            for (Map.Entry<String, BaseOutputCommitterContainer> entry : outputCommitters.entrySet()) {
                String alias = entry.getKey();
                BaseOutputCommitterContainer outputContainer = entry.getValue();
                OutputCommitter baseCommitter = outputContainer.getBaseCommitter();
                TaskAttemptContext committerContext = outputContainer.getContext();
                if (baseCommitter.needsTaskCommit(committerContext)) {
                    LOGGER.info("Calling commitTask for alias: " + alias);
                    baseCommitter.commitTask(committerContext);
                }
            }
        }
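
The matching abortTask override is not shown in this excerpt; an illustrative counterpart under the same structure (not the original implementation):

        @Override
        public void abortTask(TaskAttemptContext taskContext) throws IOException {
            // Illustrative counterpart to commitTask above: abort every aliased
            // committer against its own per-alias context.
            for (Map.Entry<String, BaseOutputCommitterContainer> entry : outputCommitters.entrySet()) {
                LOGGER.info("Calling abortTask for alias: " + entry.getKey());
                BaseOutputCommitterContainer outputContainer = entry.getValue();
                outputContainer.getBaseCommitter().abortTask(outputContainer.getContext());
            }
        }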

                // We are in RecordWriter.close(), so the context should be a TaskInputOutputContext
                bwriter.close(reporter);
            }
            for (Map.Entry<String, org.apache.hadoop.mapred.OutputCommitter> entry : baseDynamicCommitters.entrySet()) {
                org.apache.hadoop.mapred.TaskAttemptContext currContext = dynamicContexts.get(entry.getKey());
                OutputCommitter baseOutputCommitter = entry.getValue();
                if (baseOutputCommitter.needsTaskCommit(currContext)) {
                    baseOutputCommitter.commitTask(currContext);
                }
            }
        } else {
            getBaseRecordWriter().close(reporter);
        }
    }
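
One subtlety in the last snippet: the map holds org.apache.hadoop.mapred.OutputCommitter (old API) while the local variable is declared with the unqualified new-API OutputCommitter. That compiles because the old-API class extends the new-API one, as this minimal illustration (assumed for clarity, not from the source) shows:

    // The old-API committer is a subtype of the new-API one, so the
    // widening assignment in the snippet above is legal:
    org.apache.hadoop.mapred.OutputCommitter oldApi =
        new org.apache.hadoop.mapred.FileOutputCommitter();
    org.apache.hadoop.mapreduce.OutputCommitter newApi = oldApi;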
