Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.IntWritable$Comparator
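IntWritable.Comparator is the raw WritableComparator registered for IntWritable: it orders serialized int keys by comparing their bytes directly, without deserializing them. The snippets on this page use IntWritable itself rather than the nested Comparator, so here is a minimal sketch of the comparator in isolation (the demo class name is hypothetical; the Hadoop types and calls are real):

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

public class IntComparatorDemo {
  public static void main(String[] args) throws Exception {
    // Serialize two IntWritables into raw byte buffers.
    IntWritable a = new IntWritable(5);
    IntWritable b = new IntWritable(9);
    DataOutputBuffer bufA = new DataOutputBuffer();
    DataOutputBuffer bufB = new DataOutputBuffer();
    a.write(bufA);
    b.write(bufB);

    // WritableComparator.get() returns the registered IntWritable.Comparator,
    // which compares the serialized bytes without deserializing them.
    WritableComparator cmp = WritableComparator.get(IntWritable.class);
    int result = cmp.compare(bufA.getData(), 0, bufA.getLength(),
                             bufB.getData(), 0, bufB.getLength());
    System.out.println(result < 0);  // true: 5 sorts before 9
  }
}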


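A word-count style reducer using the old mapred API. The fragment was cut off mid-signature, so the parameter types restored here (Text key, Iterator&lt;IntWritable&gt; values, OutputCollector&lt;Text, IntWritable&gt; output) are an assumed reconstruction: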
    public void reduce(Text key, Iterator<IntWritable> values,
                       OutputCollector<Text, IntWritable> output,
                       Reporter reporter) throws IOException {
      // Sum every value seen for this key and emit the total.
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new IntWritable(sum));
    }


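From an index-building reducer: if the shard holds any documents, the task's partition id is written out as an IntWritable alongside the shard's HDFS path, then the local temp directory is removed. The fragment was truncated at both ends; the orphaned braces have been tidied here: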
      if (shardWriter.getNumDocs() > 0 && lastkey != null) {
        TaskID taskId = context.getTaskAttemptID().getTaskID();
        int partition = taskId.getId();
        System.out.println("###########>>>>" + partition);  // debug trace
        context.write(new IntWritable(partition), new Text(indexHdfsPath));
      }
      FileSystem lfs = FileSystem.getLocal(conf);
      if (lfs.exists(new Path(localtmpath))) {
        lfs.delete(new Path(localtmpath), true);  // recursive delete
      }

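A second fragment from the same reducer: it remembers the current key as an IntWritable and walks the DocumentMap values, stopping once the configured document limit is exceeded. The leading close-braces and bare return in the original belonged to a truncated earlier block and are dropped here: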
    lastkey = new IntWritable(key.getIndex());

    Iterator<DocumentMap> iterator = values.iterator();
    while (iterator.hasNext()) {
      if (doccount > maxDocCount || debugInfo > maxDocCount_l) {
        // document limit reached; the handling here is truncated
      }
    }

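From Hadoop's RPC Server (SASL negotiation): when security is enabled but the client asked for simple auth, the connection is rejected; when security is off but the client attempted SASL, the server answers with SWITCH_TO_SIMPLE_AUTH wrapped in an IntWritable. The truncated opening is restored here as the tail of a setupResponse(...) call, matching Hadoop's Server class: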
            setupResponse(authFailedResponse, authFailedCall, Status.FATAL,
                null, ae.getClass().getName(), ae.getMessage());
            responder.doRespond(authFailedCall);
            throw ae;
          }
          if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) {
            // Tell the client to drop SASL; the marker is an IntWritable.
            doSaslReply(SaslStatus.SUCCESS, new IntWritable(
                SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
            authMethod = AuthMethod.SIMPLE;
            // client has already sent the initial Sasl message and we
            // should ignore it. Both client and server should fall back
            // to simple auth from now on.
            skipInitialSaslHandshake = true;
          }

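An ObjectInspector-style accessor that bridges a nullable IntOption value to Hadoop's IntWritable, returning null when no value is present: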
    public Object getPrimitiveWritableObject(Object o) {
        IntOption object = (IntOption) o;
        // A missing or null-valued option maps to a null Writable.
        if (object == null || object.isNull()) {
            return null;
        }
        return new IntWritable(object.get());
    }

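A JUnit test for a JRuby-backed reducer: it builds a key and three IntWritable values, then configures the reducer with the Ruby script to run. The fragment ends before the actual reduce call and assertions: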
  @Test
  public void testReduce() throws IOException {
    Text key = new Text();
    key.set("key");
    List<IntWritable> values = new ArrayList<IntWritable>();
    values.add(new IntWritable(1));
    values.add(new IntWritable(2));
    values.add(new IntWritable(3));

    // Point the reducer at the Ruby script under test.
    JRubyReducer reducer = new JRubyReducer();
    JobConf conf = new JobConf();
    conf.set("mapred.ruby.script", "mapred.rb");
    reducer.configure(conf);
    // ... truncated: the remainder invokes reduce() and verifies the output
  }

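The same summing pattern with the new mapreduce API: values arrive as an Iterable and the result goes through Context.write: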
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Accumulate all counts for this key and emit the sum.
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }

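An MRUnit MapDriver test of InverseMapper, which swaps each (Integer, IntWritable) pair; plain Integer keys and values need JavaSerialization registered. The fragment starts after the Configuration is created, so that line is restored here by analogy with the next example: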
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        "org.apache.hadoop.io.serializer.JavaSerialization");
    final MapDriver<Integer, IntWritable, IntWritable, Integer> driver = MapDriver
        .newMapDriver(new InverseMapper<Integer, IntWritable>())
        .withConfiguration(conf);
    driver.setInput(1, new IntWritable(2));
    driver.addOutput(new IntWritable(2), 1);
    driver.runTest();
  }
View Full Code Here

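The matching ReduceDriver test: IntSumReducer sums the IntWritable values for an Integer key, again relying on JavaSerialization for the plain Integer type: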
    final Configuration conf = new Configuration();
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        "org.apache.hadoop.io.serializer.JavaSerialization");
    final ReduceDriver<Integer, IntWritable, Integer, IntWritable> driver = ReduceDriver
        .newReduceDriver(new IntSumReducer<Integer>()).withConfiguration(conf);
    // A single value of 2 under key 1 should reduce to (1, 2).
    driver.withInputKey(1).withInputValue(new IntWritable(2))
        .withOutput(1, new IntWritable(2)).runTest();
  }

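An end-to-end MapReduceDriver test pairing a StatefulMapper with a Reducer: two text inputs are expected to collapse into a single ("SomeKey", 2) output: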
    MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver
        = MapReduceDriver.newMapReduceDriver(new StatefulMapper(), new Reducer());

    mapReduceDriver.addInput(new LongWritable(1L), new Text("hello"));
    mapReduceDriver.addInput(new LongWritable(2L), new Text("schmo"));
    // The stateful mapper folds both records into one key with count 2.
    mapReduceDriver.withOutput(new Text("SomeKey"), new IntWritable(2));
    mapReduceDriver.runTest();
  }
