Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.JobContext


  /**
   * Test getting iterator options for multiple options set
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testGetIteratorOption() {
    JobContext job = new JobContext(new Configuration(), new JobID());
   
    AccumuloInputFormat.setIteratorOption(job, "iterator1", "key1", "value1");
    AccumuloInputFormat.setIteratorOption(job, "iterator2", "key2", "value2");
    AccumuloInputFormat.setIteratorOption(job, "iterator3", "key3", "value3");
   
View Full Code Here
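
The assertions are cut off above; they presumably read the options back out of the job. As a minimal sketch of the pattern under test (per-iterator options stored as keys in the job's Configuration), using only the stock Hadoop API — the PREFIX key layout here is hypothetical, not Accumulo's real property names:

import org.apache.hadoop.conf.Configuration;

public class IteratorOptionSketch {
  // Hypothetical key layout; Accumulo's real property names differ.
  private static final String PREFIX = "sketch.iterator.option.";

  static void setIteratorOption(Configuration conf, String iterator, String key, String value) {
    conf.set(PREFIX + iterator + "." + key, value);
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    setIteratorOption(conf, "iterator1", "key1", "value1");
    setIteratorOption(conf, "iterator2", "key2", "value2");
    setIteratorOption(conf, "iterator3", "key3", "value3");
    // Each option round-trips through the Configuration independently.
    System.out.println(conf.get(PREFIX + "iterator2.key2")); // prints "value2"
  }
}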


  }
 
  @SuppressWarnings("deprecation")
  @Test
  public void testSetRegex() {
    JobContext job = new JobContext(new Configuration(), new JobID());
   
    String regex = ">\"*%<>\'\\";
   
    AccumuloInputFormat.setRegex(job, RegexType.ROW, regex);
   
View Full Code Here
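
The point of the hostile-looking regex above is that quotes, angle brackets, and backslashes must survive storage in the job's Configuration. A quick hedged sketch of that round trip with plain Hadoop calls (the "row.regex" key is made up for illustration):

import org.apache.hadoop.conf.Configuration;

public class RegexRoundTrip {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    String regex = ">\"*%<>\'\\";   // same hostile characters as the test
    conf.set("row.regex", regex);   // hypothetical property name
    // The value must come back byte-for-byte identical, quotes,
    // backslash and all, or the server-side filter would be wrong.
    System.out.println(regex.equals(conf.get("row.regex"))); // prints "true"
  }
}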

      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
      bw.addMutation(m);
    }
    bw.close();
   
    JobContext job = new JobContext(new Configuration(), new JobID());
    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable2", new Authorizations());
    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
    AccumuloInputFormat input = new AccumuloInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
    RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
   
    TestMapper mapper = new TestMapper();
    Mapper<Key,Value,Key,Value>.Context context = mapper.new Context(job.getConfiguration(), tac.getTaskAttemptID(), rr, null, null, null, ris);
    while (rr.nextKeyValue()) {
      mapper.map(rr.getCurrentKey(), rr.getCurrentValue(), context);
    }
  }
View Full Code Here
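
Stripped of the Accumulo and mock-instance specifics, the snippet above is the standard way to drive a new-API RecordReader by hand. A generic sketch of that loop (old-style mapreduce classes, as used throughout this page):

import java.io.IOException;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ReaderDriver {
  // Drain any mapreduce RecordReader: initialize it against its split,
  // then step with nextKeyValue() until the input is exhausted.
  static <K, V> void drain(RecordReader<K, V> rr, InputSplit split, TaskAttemptContext tac)
      throws IOException, InterruptedException {
    rr.initialize(split, tac);
    while (rr.nextKeyValue()) {
      K key = rr.getCurrentKey();
      V value = rr.getCurrentValue();
      // hand key/value to a mapper here
    }
    rr.close();
  }
}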

      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
      bw.addMutation(m);
    }
    bw.close();
   
    JobContext job = new JobContext(new Configuration(), new JobID());
    AccumuloInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "testtable3", new Authorizations());
    AccumuloInputFormat.setMockInstance(job.getConfiguration(), "testmapinstance");
    final String regex = ".*1.*";
    AccumuloInputFormat.setRegex(job, RegexType.ROW, regex);
    AccumuloInputFormat input = new AccumuloInputFormat();
    RangeInputSplit ris = new RangeInputSplit();
    TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
    RecordReader<Key,Value> rr = input.createRecordReader(ris, tac);
    rr.initialize(ris, tac);
   
    Pattern p = Pattern.compile(regex);
    while (rr.nextKeyValue()) {
View Full Code Here
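
The loop body is cut off; presumably it asserts that every row the reader returns matches the pushed-down pattern. A sketch of that check, wrapped as a helper (assuming JUnit's assertTrue; Key.getRow() is real Accumulo API, the helper itself is invented):

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.regex.Pattern;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.mapreduce.RecordReader;

public class RegexCheck {
  // Assert that every row the reader yields matches the pushed-down regex.
  static void assertAllRowsMatch(RecordReader<Key,Value> rr, String regex)
      throws IOException, InterruptedException {
    Pattern p = Pattern.compile(regex);
    while (rr.nextKeyValue()) {
      String row = rr.getCurrentKey().getRow().toString();
      assertTrue("unexpected row: " + row, p.matcher(row).matches());
    }
  }
}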

                // the inputSpecificJob - use this updated conf
               
                // get the InputFormat from it and ask for splits
                InputFormat inpFormat = loadFunc.getInputFormat();
                List<InputSplit> oneInputSplits = inpFormat.getSplits(
                        new JobContext(inputSpecificJob.getConfiguration(),
                                jobcontext.getJobID()));
                List<InputSplit> oneInputPigSplits = getPigSplits(
                        oneInputSplits, i, inpTargets.get(i), fs.getDefaultBlockSize(), combinable, confClone);
                splits.addAll(oneInputPigSplits);
            } catch (ExecException ee) {
View Full Code Here
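
The shape of the Pig code above, minus the Pig plumbing: one JobContext per input so each input's configuration tweaks stay isolated, with the resulting splits merged into a single list for the map phase. A hedged sketch of that aggregation:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;

public class SplitCollector {
  // Ask each input's InputFormat for splits under its own JobContext,
  // then pool everything into one list.
  static List<InputSplit> collect(List<InputFormat<?,?>> formats, List<JobContext> contexts)
      throws IOException, InterruptedException {
    List<InputSplit> all = new ArrayList<InputSplit>();
    for (int i = 0; i < formats.size(); i++) {
      all.addAll(formats.get(i).getSplits(contexts.get(i)));
    }
    return all;
  }
}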

                            .instantiateFuncFromSpec(ld.getLFile()
                                    .getFuncSpec());
                            Job job = new Job(conf);
                            loader.setLocation(location, job);
                            InputFormat inf = loader.getInputFormat();
                            List<InputSplit> splits = inf.getSplits(new JobContext(
                                    job.getConfiguration(), job.getJobID()));
                            List<List<InputSplit>> results = MapRedUtil
                            .getCombinePigSplits(splits, fs
                                    .getDefaultBlockSize(), conf);
                            numFiles += results.size();
View Full Code Here
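
Here the split count is only used to estimate how many files a load will touch. The same question asked of a plain file input, with stock Hadoop classes standing in for the Pig loader — the path is hypothetical, and JobContext is the pre-Hadoop-2 concrete class used throughout this page:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class SplitCount {
  public static void main(String[] args) throws Exception {
    Job job = new Job(new Configuration());
    FileInputFormat.addInputPath(job, new Path("/tmp/input"));  // hypothetical path
    TextInputFormat inf = new TextInputFormat();
    // Same pattern as above: a throw-away JobContext built from the job's conf.
    int n = inf.getSplits(new JobContext(job.getConfiguration(), job.getJobID())).size();
    System.out.println(n + " splits");
  }
}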

    /**
     * Persists the schema for the given store outside a running task.
     *
     * @param job
     * @param st
     * @throws IOException
     */
    private void storeSchema(Job job, POStore st) throws IOException {
        JobContext jc = new JobContext(job.getJobConf(),
                new org.apache.hadoop.mapreduce.JobID());
        JobContext updatedJc = PigOutputCommitter.setUpContext(jc, st);
        PigOutputCommitter.storeCleanup(st, updatedJc.getConfiguration());
    }
View Full Code Here
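
storeSchema needs a JobContext outside any live task, so it fabricates one from the job's configuration plus a fresh JobID. The same trick is useful elsewhere, for example to validate output specs up front; a sketch under that assumption (output path hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class SpecCheck {
  public static void main(String[] args) throws Exception {
    Job job = new Job(new Configuration());
    FileOutputFormat.setOutputPath(job, new Path("/tmp/out"));  // hypothetical path
    // A throw-away context, exactly as storeSchema builds one above.
    JobContext jc = new JobContext(job.getConfiguration(), new JobID());
    new TextOutputFormat<Object,Object>().checkOutputSpecs(jc);  // throws if /tmp/out exists
  }
}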

    checkExpectedRangeBins(10, new String[] {"C", "A", "B"}, new int[] {29, 9, 19});
    checkExpectedRangeBins(10, new String[] {"", "AA", "BB", "CC"}, new int[] {9, 19, 29, 39});
  }
 
  private RangePartitioner prepPartitioner(int numSubBins) {
    JobContext job = new JobContext(new Configuration(), new JobID());
    RangePartitioner.setNumSubBins(job, numSubBins);
    RangePartitioner rp = new RangePartitioner();
    rp.setConf(job.getConfiguration());
    return rp;
  }
View Full Code Here
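
prepPartitioner works because RangePartitioner honors Hadoop's Configurable contract: settings written into the job's Configuration are handed back through setConf(). A stand-in sketch of that contract — ModuloPartitioner and its property key are invented for illustration, not Accumulo's class:

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class ModuloPartitioner extends Partitioner<Text,Text> implements Configurable {
  private Configuration conf;
  private int numSubBins = 1;

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
    // Picks up whatever a setter like setNumSubBins() stored in the job conf.
    numSubBins = conf.getInt("sketch.num.subbins", 1);  // hypothetical key
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public int getPartition(Text key, Text value, int numPartitions) {
    return (key.hashCode() & Integer.MAX_VALUE) % numPartitions;
  }
}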

  /**
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  @Test
  public void testMaxVersions() throws IOException {
    JobContext job = new JobContext(new Configuration(), new JobID());
    AccumuloInputFormat.setMaxVersions(job.getConfiguration(), 1);
    int version = AccumuloInputFormat.getMaxVersions(job.getConfiguration());
    assertEquals(1, version);
  }
View Full Code Here
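
Under the hood this is a plain integer round trip through the Configuration; a minimal sketch with a made-up property name standing in for Accumulo's:

import org.apache.hadoop.conf.Configuration;

public class MaxVersionsRoundTrip {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setInt("max.versions", 1);                       // hypothetical key
    System.out.println(conf.getInt("max.versions", -1));  // prints "1"
  }
}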

  /**
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  @Test(expected = IOException.class)
  public void testMaxVersionsLessThan1() throws IOException {
    JobContext job = new JobContext(new Configuration(), new JobID());
    AccumuloInputFormat.setMaxVersions(job.getConfiguration(), 0);
  }
View Full Code Here
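
The expected IOException comes from the setter validating its argument before touching the Configuration. A sketch of that validating-setter pattern (property name again hypothetical):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

public class ValidatingSetter {
  static void setMaxVersions(Configuration conf, int maxVersions) throws IOException {
    // Reject nonsense before it can reach the job configuration.
    if (maxVersions < 1)
      throw new IOException("Invalid maxVersions: " + maxVersions + " (must be >= 1)");
    conf.setInt("max.versions", maxVersions);  // hypothetical key
  }
}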
