Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.JobContext


        // The above setLocation call could write to the conf within
        // the job - get a hold of the modified conf
        conf = job.getConfiguration();
        inputFormat = wrappedLoadFunc.getInputFormat();
        try {
            inpSplits = inputFormat.getSplits(new JobContext(conf,
                    new JobID()));
        } catch (InterruptedException e) {
            throw new IOException(e);
        }
    }
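
The snippet above captures the pattern most of these examples share: a load function's setLocation() may write into the job's configuration, so the updated conf is read back from the Job and handed to the InputFormat via a freshly constructed JobContext. A minimal, self-contained sketch of that pattern, assuming the pre-Hadoop-2 API where JobContext is a concrete class with a public constructor; TextInputFormat and the /tmp/input path are stand-ins, and the path must exist for getSplits() to return anything:

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class SplitListing {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf);
        // Configure the input location; /tmp/input is a placeholder path.
        FileInputFormat.addInputPath(job, new Path("/tmp/input"));
        // Configuring the job above may have modified its conf -
        // read the updated conf back from the job.
        conf = job.getConfiguration();
        InputFormat<?, ?> inputFormat = new TextInputFormat();
        // Hand the InputFormat a fresh JobContext built on the updated conf.
        List<InputSplit> splits = inputFormat.getSplits(
                new JobContext(conf, new JobID()));
        System.out.println("number of splits: " + splits.size());
    }
}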


                            .instantiateFuncFromSpec(ld.getLFile()
                                    .getFuncSpec());
                            Job job = new Job(conf);
                            loader.setLocation(location, job);
                            InputFormat inf = loader.getInputFormat();
                            List<InputSplit> splits = inf.getSplits(new JobContext(
                                    job.getConfiguration(), job.getJobID()));
                            List<List<InputSplit>> results = MapRedUtil
                                    .getCombinePigSplits(splits,
                                            fs.getDefaultBlockSize(), conf);
                            numFiles += results.size();

     * @param job the job whose configuration carries the store information
     * @param st the store operator whose schema should be persisted
     * @throws IOException if setting up the context or storing the schema fails
     */
    private void storeSchema(Job job, POStore st) throws IOException {
        JobContext jc = new JobContext(job.getJobConf(),
                new org.apache.hadoop.mapreduce.JobID());
        JobContext updatedJc = PigOutputCommitter.setUpContext(jc, st);
        PigOutputCommitter.storeCleanup(st, updatedJc.getConfiguration());
    }
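
storeSchema() above bridges the two MapReduce APIs: it wraps the job's old-style JobConf (a Configuration subclass) in a new-API JobContext with a freshly minted JobID so new-API components can consume it. A stripped-down sketch of just that bridging step, again assuming the pre-Hadoop-2 concrete JobContext; the class and method names are hypothetical:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;

public class JobConfBridge {
    // JobConf extends Configuration, so it can be passed straight to the
    // new-API JobContext; a synthetic JobID suffices when no real job is
    // attached to the context yet.
    static JobContext contextFor(JobConf jobConf) {
        return new JobContext(jobConf, new JobID());
    }
}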

    private void checkOutputSpecsHelper(List<POStore> stores, JobContext
            jobcontext) throws IOException, InterruptedException {
        for (POStore store : stores) {
            // make a copy of the original JobContext so that
            // each OutputFormat get a different copy
            JobContext jobContextCopy = new JobContext(
                    jobcontext.getConfiguration(), jobcontext.getJobID());
           
            // set output location
            PigOutputFormat.setLocation(jobContextCopy, store);
           
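The per-store copy is a real copy: in the pre-Hadoop-2 API the JobContext constructor copies the Configuration it is handed into a fresh JobConf, so each OutputFormat can modify its own context without affecting the others. A small sketch illustrating that behaviour; the class name and property key are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;

public class ContextCopyDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        JobContext original = new JobContext(conf, new JobID());

        // Copy the context the same way the snippet above does.
        JobContext copy = new JobContext(
                original.getConfiguration(), original.getJobID());

        // Mutating the copy's conf leaves the original untouched, since
        // the constructor copied the Configuration it was given.
        copy.getConfiguration().set("example.output.location", "/out/a");
        System.out.println(original.getConfiguration()
                .get("example.output.location"));  // prints null
    }
}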

                // The above setLocation call could write to the conf within
                // the inputSpecificJob - use this updated conf
               
                // get the InputFormat from it and ask for splits
                InputFormat inpFormat = loadFunc.getInputFormat();
                List<InputSplit> oneInputSplits = inpFormat.getSplits(
                        new JobContext(inputSpecificJob.getConfiguration(),
                                jobcontext.getJobID()));
                List<InputSplit> oneInputPigSplits = getPigSplits(
                        oneInputSplits, i, inpTargets.get(i), fs.getDefaultBlockSize(), combinable, confClone);
                splits.addAll(oneInputPigSplits);
            } catch (ExecException ee) {

   
    static JobContext setUpContext(JobContext context,
            POStore store) throws IOException {
        // make a copy of the context so that the actions after this call
        // do not end up updating the same context
        JobContext contextCopy = new JobContext(
                context.getConfiguration(), context.getJobID());
       
        // call setLocation() on the storeFunc so that any side effects,
        // like setting map.output.dir on the Configuration in the Context,
        // that are needed by the OutputCommitter take place on this copy

   
    @Override
    public void cleanupJob(JobContext context) throws IOException {
        // call clean up on all map and reduce committers
        for (Pair<OutputCommitter, POStore> mapCommitter : mapOutputCommitters) {           
            JobContext updatedContext = setUpContext(context,
                    mapCommitter.second);
            storeCleanup(mapCommitter.second, updatedContext.getConfiguration());
            mapCommitter.first.cleanupJob(updatedContext);
        }
        for (Pair<OutputCommitter, POStore> reduceCommitter :
            reduceOutputCommitters) {           
            JobContext updatedContext = setUpContext(context,
                    reduceCommitter.second);
            storeCleanup(reduceCommitter.second, updatedContext.getConfiguration());
            reduceCommitter.first.cleanupJob(updatedContext);
        }
    }

   
    @Override
    public void setupJob(JobContext context) throws IOException {
        // call set up on all map and reduce committers
        for (Pair<OutputCommitter, POStore> mapCommitter : mapOutputCommitters) {
            JobContext updatedContext = setUpContext(context,
                    mapCommitter.second);
            mapCommitter.first.setupJob(updatedContext);
        }
        for (Pair<OutputCommitter, POStore> reduceCommitter :
            reduceOutputCommitters) {
            JobContext updatedContext = setUpContext(context,
                    reduceCommitter.second);
            reduceCommitter.first.setupJob(updatedContext);
        }
    }
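
setupJob() and cleanupJob() above share one shape: every wrapped map-side and reduce-side committer gets its own copy of the JobContext (via setUpContext) before the call is delegated, so no committer observes another's configuration changes. A hypothetical condensed helper capturing that shape; the committers list stands in for the map/reduce committer pairs above:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;

public class CommitterFanOut {
    // Delegate setupJob to each committer against its own JobContext copy,
    // so one committer's conf changes cannot leak into the next.
    static void setupAll(List<OutputCommitter> committers, JobContext context)
            throws IOException {
        for (OutputCommitter committer : committers) {
            JobContext copy = new JobContext(
                    context.getConfiguration(), context.getJobID());
            committer.setupJob(copy);
        }
    }
}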

import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class HadoopShims {
    static public JobContext cloneJobContext(JobContext original) throws IOException, InterruptedException {
        JobContext newContext = ContextFactory.cloneContext(original, original.getConfiguration());
        return newContext;
    }

    static public JobContext createJobContext(Configuration conf,
            JobID jobId) {
        JobContext newContext = new JobContextImpl(conf, jobId);
        return newContext;
    }
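
The two shim methods above exist because the API changed shape: on Hadoop 0.23/2.x, JobContext became an interface, and the concrete class to instantiate is org.apache.hadoop.mapreduce.task.JobContextImpl. A minimal sketch of the new-API side; the job identifier values are arbitrary:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class ShimDemo {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // JobContext is an interface here, so construct JobContextImpl.
        JobContext context = new JobContextImpl(conf, new JobID("local", 1));
        System.out.println(context.getJobID());  // prints job_local_0001
    }
}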
