Package: org.apache.flink.api.java

Usage examples of org.apache.flink.api.java.CollectionEnvironment


  private final String SUFFIX = "-suffixed";
 
  @Test
  public void testUnaryOp() {
    try {
      ExecutionEnvironment env = new CollectionEnvironment();
     
      DataSet<String> bcData = env.fromElements(SUFFIX);
     
      List<String> result = new ArrayList<String>();
     
      env.fromElements(TEST_DATA)
          .map(new SuffixAppender()).withBroadcastSet(bcData, BC_VAR_NAME)
          .output(new LocalCollectionOutputFormat<String>(result));
     
      env.execute();
     
      assertEquals(TEST_DATA.length, result.size());
      for (String s : result) {
        assertTrue(s.indexOf(SUFFIX) > 0);
      }
View Full Code Here


  }
 
  @Test
  public void testBinaryOp() {
    try {
      ExecutionEnvironment env = new CollectionEnvironment();
     
      DataSet<String> bcData = env.fromElements(SUFFIX);
      DataSet<String> inData = env.fromElements(TEST_DATA);
     
      List<String> result = new ArrayList<String>();
     
      inData.cross(inData).with(new SuffixCross()).withBroadcastSet(bcData, BC_VAR_NAME)
          .output(new LocalCollectionOutputFormat<String>(result));
     
      env.execute();
     
      assertEquals(TEST_DATA.length * TEST_DATA.length, result.size());
      for (String s : result) {
        assertTrue(s.indexOf(SUFFIX) == 2);
      }
View Full Code Here

  @Test
  public void testAccumulator() {
    try {
      final int NUM_ELEMENTS = 100;
     
      ExecutionEnvironment env = new CollectionEnvironment();
     
      env.generateSequence(1, NUM_ELEMENTS)
        .map(new CountingMapper())
        .output(new DiscardingOuputFormat<Long>());
     
      JobExecutionResult result = env.execute();
     
      assertTrue(result.getNetRuntime() >= 0);
     
      assertEquals(NUM_ELEMENTS, result.getAccumulatorResult(ACCUMULATOR_NAME));
    }
View Full Code Here

public class CollectionExecutionIterationTest implements java.io.Serializable {

  @Test
  public void testBulkIteration() {
    try {
      ExecutionEnvironment env = new CollectionEnvironment();
     
      IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(10);
     
      DataSet<Integer> result = iteration.closeWith(iteration.map(new AddSuperstepNumberMapper()));
     
      List<Integer> collected = new ArrayList<Integer>();
      result.output(new LocalCollectionOutputFormat<Integer>(collected));
     
      env.execute();
     
      assertEquals(1, collected.size());
      assertEquals(56, collected.get(0).intValue());
    }
    catch (Exception e) {
View Full Code Here

  }
 
  @Test
  public void testBulkIterationWithTerminationCriterion() {
    try {
      ExecutionEnvironment env = new CollectionEnvironment();
     
      IterativeDataSet<Integer> iteration = env.fromElements(1).iterate(100);
     
      DataSet<Integer> iterationResult = iteration.map(new AddSuperstepNumberMapper());

      DataSet<Integer> terminationCriterion = iterationResult.filter(new FilterFunction<Integer>() {
        public boolean filter(Integer value) {
          return value < 50;
        }
      });
     
      List<Integer> collected = new ArrayList<Integer>();
     
      iteration.closeWith(iterationResult, terminationCriterion)
          .output(new LocalCollectionOutputFormat<Integer>(collected));
     
      env.execute();
     
      assertEquals(1, collected.size());
      assertEquals(56, collected.get(0).intValue());
    }
    catch (Exception e) {
View Full Code Here

  }

  @Test
  public void testDeltaIteration() {
    try {
      ExecutionEnvironment env = new CollectionEnvironment();

      @SuppressWarnings("unchecked")
      DataSet<Tuple2<Integer, Integer>> solInput = env.fromElements(
          new Tuple2<Integer, Integer>(1, 0),
          new Tuple2<Integer, Integer>(2, 0),
          new Tuple2<Integer, Integer>(3, 0),
          new Tuple2<Integer, Integer>(4, 0));
     
      @SuppressWarnings("unchecked")
      DataSet<Tuple1<Integer>> workInput = env.fromElements(
          new Tuple1<Integer>(1),
          new Tuple1<Integer>(2),
          new Tuple1<Integer>(3),
          new Tuple1<Integer>(4));


      // Perform a delta iteration where we add those values to the workset where
      // the second tuple field is smaller than the first tuple field.
      // At the end both tuple fields must be the same.

      DeltaIteration<Tuple2<Integer, Integer>, Tuple1<Integer>> iteration =
        solInput.iterateDelta(workInput, 10, 0);

      DataSet<Tuple2<Integer, Integer>> solDelta = iteration.getSolutionSet().join(
          iteration.getWorkset()).where(0).equalTo(0).with(
          new JoinFunction<Tuple2<Integer, Integer>, Tuple1<Integer>, Tuple2<Integer, Integer>>() {

        @Override
        public Tuple2<Integer, Integer> join(Tuple2<Integer, Integer> first,
            Tuple1<Integer> second) throws Exception {
          return new Tuple2<Integer, Integer>(first.f0, first.f1 + 1);
        }
      });

      DataSet<Tuple1<Integer>> nextWorkset = solDelta.flatMap(
          new FlatMapFunction<Tuple2<Integer, Integer>, Tuple1<Integer>>() {
        @Override
        public void flatMap(Tuple2<Integer, Integer> in, Collector<Tuple1<Integer>>
            out) throws Exception {
          if (in.f1 < in.f0) {
            out.collect(new Tuple1<Integer>(in.f0));
          }
        }
      });


      List<Tuple2<Integer, Integer>> collected = new ArrayList<Tuple2<Integer, Integer>>();

      iteration.closeWith(solDelta, nextWorkset)
          .output(new LocalCollectionOutputFormat<Tuple2<Integer, Integer>>(collected));

      env.execute();

      // verify that both tuple fields are now the same
      for (Tuple2<Integer, Integer> t: collected) {
        assertEquals(t.f0, t.f1);
      }
View Full Code Here

TOP

Related Classes of org.apache.flink.api.java.CollectionEnvironment

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware@gmail.com.