Package org.apache.flink.api.common

Examples of org.apache.flink.api.common.Plan
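
The snippets below show the two halves of the Plan life cycle in this API generation: a Plan is assembled from its data sink(s), given a default parallelism, and then either executed directly or handed to the optimizer. As a minimal, self-contained sketch of the same pattern, assuming the old Record API types the examples use (the paths and the single-field CSV schema are placeholders, not taken from any snippet):

  import org.apache.flink.api.common.Plan;
  import org.apache.flink.api.java.record.io.CsvInputFormat;
  import org.apache.flink.api.java.record.io.CsvOutputFormat;
  import org.apache.flink.api.java.record.operators.FileDataSink;
  import org.apache.flink.api.java.record.operators.FileDataSource;
  import org.apache.flink.client.LocalExecutor;
  import org.apache.flink.types.StringValue;

  public class PlanLifecycleSketch {
    public static void main(String[] args) throws Exception {
      // read single-column string records (placeholder path and schema)
      FileDataSource source = new FileDataSource(
          new CsvInputFormat(' ', StringValue.class), "file:///tmp/in", "Input");

      // write them straight back out; the sink is what defines the Plan
      FileDataSink out = new FileDataSink(new CsvOutputFormat(), "file:///tmp/out", source, "Output");
      CsvOutputFormat.configureRecordFormat(out)
        .recordDelimiter('\n')
        .fieldDelimiter(' ')
        .field(StringValue.class, 0);

      Plan plan = new Plan(out, "Plan Lifecycle Sketch");
      plan.setDefaultParallelism(2);  // default degree of parallelism for all operators
      LocalExecutor.execute(plan);    // run embedded in a local context
    }
  }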


  @Test
  public void testCompileKMeansSingleStepWithOutStats() {
    KMeansBroadcast kmi = new KMeansBroadcast();
    Plan p = kmi.getPlan(String.valueOf(DEFAULT_PARALLELISM), IN_FILE, IN_FILE, OUT_FILE, String.valueOf(20));

    // compile without statistics and verify the optimized plan
    OptimizedPlan plan = compileNoStats(p);
    checkPlan(plan);

    // the optimized plan must also translate into a job graph
    new NepheleJobGraphGenerator().compileJobGraph(plan);
  }


        DataSet<Tuple2<Long, Long>> result = initialVertices.runOperation(
            VertexCentricIteration.withPlainEdges(edges, new CCUpdater(), new CCMessager(), 100));

        result.print();
      }

      Plan p = env.createProgramPlan("Spargel Connected Components");
      OptimizedPlan op = compileNoStats(p);

      // check the sink
      SinkPlanNode sink = op.getDataSinks().iterator().next();
      assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());

        DataSet<Tuple2<Long, Long>> result = initialVertices.runOperation(vcIter);
       
        result.print();
      }
     
      Plan p = env.createProgramPlan("Spargel Connected Components");
      OptimizedPlan op = compileNoStats(p);
     
      // check the sink
      SinkPlanNode sink = op.getDataSinks().iterator().next();
      assertEquals(ShipStrategyType.FORWARD, sink.getInput().getShipStrategy());

      .field(StringValue.class, 0)
      .field(StringValue.class, 1)
      .field(StringValue.class, 2)
      .field(StringValue.class, 3);
   
    Plan plan = new Plan(out, "HBase access Example");
    plan.setDefaultParallelism(numSubTasks);
    return plan;
  }

      .recordDelimiter('\n')
      .fieldDelimiter(' ')
      .field(StringValue.class, 0)
      .field(IntValue.class, 1);
   
    Plan plan = new Plan(out, "WordCount Example");
    plan.setDefaultParallelism(numSubTasks);
    return plan;
  }

    if (args.length < 3) {
      System.err.println(wc.getDescription());
      System.exit(1);
    }
   
    Plan plan = wc.getPlan(args);
   
    // This executes the word count embedded in a local context. Replace this line
    // with the commented line that follows to send the job to a local installation or to a cluster.
    LocalExecutor.execute(plan);
//    PlanExecutor ex = new RemoteExecutor("localhost", 6123, "target/pact-examples-0.4-SNAPSHOT-WordCount.jar");
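
The commented line points at the remote path: a PlanExecutor constructed with a job manager address and the example jar runs the same plan on a running installation. A hedged sketch of that variant, reusing the placeholder host, port, and jar path from the comment:

    // remote execution sketch; host, port, and jar path are the placeholders from above
    PlanExecutor ex = new RemoteExecutor("localhost", 6123, "target/pact-examples-0.4-SNAPSHOT-WordCount.jar");
    ex.executePlan(plan);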

    // compose failing program
    output.setInput(testMapper);
    testMapper.setInput(input);

    // generate plan
    Plan plan = new Plan(output);
    plan.setDefaultParallelism(DOP);

    // optimize and compile plan
    PactCompiler pc = new PactCompiler(new DataStatistics());
    OptimizedPlan op = pc.compile(plan);
   
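Together with the first test above, these compiler snippets spell out a single pipeline: the PactCompiler optimizes a Plan into an OptimizedPlan, which the NepheleJobGraphGenerator then translates into a deployable job graph. A compact sketch of the sequence, where plan stands in for any of the plans built in these examples:

    // Plan -> PactCompiler -> OptimizedPlan -> NepheleJobGraphGenerator -> JobGraph
    PactCompiler pc = new PactCompiler(new DataStatistics());  // no statistics supplied
    OptimizedPlan op = pc.compile(plan);                       // chooses ship and local strategies
    JobGraph jobGraph = new NepheleJobGraphGenerator().compileJobGraph(op);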

    // compose working program
    output.setInput(testMapper);
    testMapper.setInput(input);

    // generate plan
    Plan plan = new Plan(output);
    plan.setDefaultParallelism(4);

    // optimize and compile plan
    PactCompiler pc = new PactCompiler(new DataStatistics());
    OptimizedPlan op = pc.compile(plan);

    output.setInput(testCross);
    testCross.setFirstInput(input_left);
    testCross.setSecondInput(input_right);

    return new Plan(output);
  }

        .name("Count Words")
        .build();
    HadoopDataSink<Text, IntWritable> out = new HadoopDataSink<Text, IntWritable>(
        new TextOutputFormat<Text, IntWritable>(), new JobConf(), "Hadoop TextOutputFormat",
        reducer, Text.class, IntWritable.class);
    TextOutputFormat.setOutputPath(out.getJobConf(), new Path(output));

    Plan plan = new Plan(out, "Hadoop OutputFormat Example");
    plan.setDefaultParallelism(numSubTasks);
    return plan;
  }