Package org.apache.flink.runtime.jobgraph

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex


    final int NUM_TASKS = 31;
   
    JobManager jm = null;
   
    try {
      final AbstractJobVertex sender = new AbstractJobVertex("Sender");
      final AbstractJobVertex receiver = new AbstractJobVertex("Receiver");
     
      sender.setInvokableClass(Sender.class);
      receiver.setInvokableClass(ReceiverBlockingOnce.class);
      sender.setParallelism(NUM_TASKS);
      receiver.setParallelism(NUM_TASKS);
     
      receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE);
     
      SlotSharingGroup sharingGroup = new SlotSharingGroup();
      sender.setSlotSharingGroup(sharingGroup);
      receiver.setSlotSharingGroup(sharingGroup);
     
      final JobGraph jobGraph = new JobGraph("Pointwise Job", sender, receiver);
      jobGraph.setNumberOfExecutionRetries(1);
     
      // make sure we have fast heartbeats and failure detection
View Full Code Here


  public void testForwardJob() {
   
    final int NUM_TASKS = 31;
   
    try {
      final AbstractJobVertex sender = new AbstractJobVertex("Sender");
      final AbstractJobVertex receiver = new AbstractJobVertex("Receiver");
     
      sender.setInvokableClass(Sender.class);
      receiver.setInvokableClass(Receiver.class);
     
      sender.setParallelism(NUM_TASKS);
      receiver.setParallelism(NUM_TASKS);
     
      receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE);
     
      SlotSharingGroup sharingGroup = new SlotSharingGroup(sender.getID(), receiver.getID());
      sender.setSlotSharingGroup(sharingGroup);
      receiver.setSlotSharingGroup(sharingGroup);
     
      receiver.setStrictlyCoLocatedWith(sender);
     
      final JobGraph jobGraph = new JobGraph("Pointwise Job", sender, receiver);
     
      final JobManager jm = startJobManager(NUM_TASKS);
     
View Full Code Here

  @Test
  public void testExecutionWithFailingTaskManager() {
    final int NUM_TASKS = 20;
   
    try {
      final AbstractJobVertex sender = new AbstractJobVertex("Sender");
      final AbstractJobVertex receiver = new AbstractJobVertex("Receiver");
      sender.setInvokableClass(Sender.class);
      receiver.setInvokableClass(BlockingReceiver.class);
      sender.setParallelism(NUM_TASKS);
      receiver.setParallelism(NUM_TASKS);
      receiver.connectNewDataSetAsInput(sender, DistributionPattern.POINTWISE);
     
      SlotSharingGroup sharingGroup = new SlotSharingGroup();
      sender.setSlotSharingGroup(sharingGroup);
      receiver.setSlotSharingGroup(sharingGroup);
     
      final JobGraph jobGraph = new JobGraph("Pointwise Job", sender, receiver);
     
      final JobManager jm = startJobManager(2, NUM_TASKS / 2);
     
      final TaskManager tm1 = ((LocalInstanceManager) jm.getInstanceManager()).getTaskManagers()[0];
      final TaskManager tm2 = ((LocalInstanceManager) jm.getInstanceManager()).getTaskManagers()[1];
     
      final GlobalBufferPool bp1 = tm1.getChannelManager().getGlobalBufferPool();
      final GlobalBufferPool bp2 = tm2.getChannelManager().getGlobalBufferPool();
     
      try {
        JobSubmissionResult result = jm.submitJob(jobGraph);

        if (result.getReturnCode() != AbstractJobResult.ReturnCode.SUCCESS) {
          System.out.println(result.getDescription());
        }
        assertEquals(AbstractJobResult.ReturnCode.SUCCESS, result.getReturnCode());
       
        ExecutionGraph eg = jm.getCurrentJobs().get(jobGraph.getJobID());
       
        // wait until everyone has settled in
        long deadline = System.currentTimeMillis() + 2000;
        while (System.currentTimeMillis() < deadline) {
         
          boolean allrunning = true;
          for (ExecutionVertex v : eg.getJobVertex(receiver.getID()).getTaskVertices()) {
            if (v.getCurrentExecutionAttempt().getState() != ExecutionState.RUNNING) {
              allrunning = false;
              break;
            }
          }
View Full Code Here

      final JobVertexID jid1 = new JobVertexID();
      final JobVertexID jid2 = new JobVertexID();
      final JobVertexID jid3 = new JobVertexID();
      final JobVertexID jid4 = new JobVertexID();
     
      AbstractJobVertex v1 = new AbstractJobVertex("v1", jid1);
      AbstractJobVertex v2 = new AbstractJobVertex("v2", jid2);
      AbstractJobVertex v3 = new AbstractJobVertex("v3", jid3);
      AbstractJobVertex v4 = new AbstractJobVertex("v4", jid4);
     
      v1.setParallelism(10);
      v2.setParallelism(10);
      v3.setParallelism(10);
      v4.setParallelism(10);
     
      v1.setInvokableClass(RegularPactTask.class);
      v2.setInvokableClass(RegularPactTask.class);
      v3.setInvokableClass(RegularPactTask.class);
      v4.setInvokableClass(RegularPactTask.class);
     
      v2.connectNewDataSetAsInput(v1, DistributionPattern.BIPARTITE);
      v3.connectNewDataSetAsInput(v2, DistributionPattern.BIPARTITE);
      v4.connectNewDataSetAsInput(v2, DistributionPattern.BIPARTITE);
     
      ExecutionGraph eg = spy(new ExecutionGraph(jobId, "some job", new Configuration()));
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) {
View Full Code Here

    final JobID jobId = new JobID();
   
    final JobVertexID jid1 = new JobVertexID();
    final JobVertexID jid2 = new JobVertexID();
   
    AbstractJobVertex v1 = new AbstractJobVertex("v1", jid1);
    AbstractJobVertex v2 = new AbstractJobVertex("v2", jid2);
   
    v1.setParallelism(dop1);
    v2.setParallelism(dop2);
   
    v1.setInvokableClass(RegularPactTask.class);
    v2.setInvokableClass(RegularPactTask.class);
   
    // execution graph that executes actions synchronously
    ExecutionGraph eg = new ExecutionGraph(jobId, "some job", new Configuration());
    eg.setQueuedSchedulingAllowed(false);
   
View Full Code Here

   
    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
    AbstractJobVertex v3 = new AbstractJobVertex("vertex3");
    AbstractJobVertex v4 = new AbstractJobVertex("vertex4");
    AbstractJobVertex v5 = new AbstractJobVertex("vertex5");
   
    v1.setParallelism(5);
    v2.setParallelism(7);
    v3.setParallelism(2);
    v4.setParallelism(11);
    v5.setParallelism(4);
   
    v2.connectNewDataSetAsInput(v1, DistributionPattern.BIPARTITE);
    v4.connectNewDataSetAsInput(v2, DistributionPattern.BIPARTITE);
    v4.connectNewDataSetAsInput(v3, DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v3, DistributionPattern.BIPARTITE);
   
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2, v3, v4, v5));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
View Full Code Here

    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
   
    // construct part one of the execution graph
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
    AbstractJobVertex v3 = new AbstractJobVertex("vertex3");
   
    v1.setParallelism(5);
    v2.setParallelism(7);
    v3.setParallelism(2);
   
    // this creates an intermediate result for v1
    v2.connectNewDataSetAsInput(v1, DistributionPattern.BIPARTITE);
   
    // create results for v2 and v3
    IntermediateDataSet v2result = v2.createAndAddResultDataSet();
    IntermediateDataSet v3result_1 = v3.createAndAddResultDataSet();
    IntermediateDataSet v3result_2 = v3.createAndAddResultDataSet();
   
   
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2, v3));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    // attach the second part of the graph
   
    AbstractJobVertex v4 = new AbstractJobVertex("vertex4");
    AbstractJobVertex v5 = new AbstractJobVertex("vertex5");
    v4.setParallelism(11);
    v5.setParallelism(4);
   
    v4.connectDataSetAsInput(v2result, DistributionPattern.BIPARTITE);
    v4.connectDataSetAsInput(v3result_1, DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.BIPARTITE);
    v5.connectDataSetAsInput(v3result_2, DistributionPattern.BIPARTITE);
   
    List<AbstractJobVertex> ordered2 = new ArrayList<AbstractJobVertex>(Arrays.asList(v4, v5));
   
    try {
      eg.attachJobGraph(ordered2);
View Full Code Here

    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
   
    // construct part one of the execution graph
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
    AbstractJobVertex v3 = new AbstractJobVertex("vertex3");
   
    v1.setParallelism(5);
    v2.setParallelism(7);
    v3.setParallelism(2);
   
    // this creates an intermediate result for v1
    v2.connectNewDataSetAsInput(v1, DistributionPattern.BIPARTITE);
   
    // create results for v2 and v3
    IntermediateDataSet v2result = v2.createAndAddResultDataSet();
    IntermediateDataSet v3result_1 = v3.createAndAddResultDataSet();
    IntermediateDataSet v3result_2 = v3.createAndAddResultDataSet();
   
   
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2, v3));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    // attach the second part of the graph
   
    AbstractJobVertex v4 = new AbstractJobVertex("vertex4");
    AbstractJobVertex v5 = new AbstractJobVertex("vertex5");
    v4.setParallelism(11);
    v5.setParallelism(4);
   
    v4.connectIdInput(v2result.getId(), DistributionPattern.BIPARTITE);
    v4.connectIdInput(v3result_1.getId(), DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.BIPARTITE);
    v5.connectIdInput(v3result_2.getId(), DistributionPattern.BIPARTITE);
   
    List<AbstractJobVertex> ordered2 = new ArrayList<AbstractJobVertex>(Arrays.asList(v4, v5));
   
    try {
      eg.attachJobGraph(ordered2);
View Full Code Here

    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
   
    // construct part one of the execution graph
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    v1.setParallelism(7);
   
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    // attach the second part of the graph
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
    v2.connectIdInput(new IntermediateDataSetID(), DistributionPattern.BIPARTITE);
   
    List<AbstractJobVertex> ordered2 = new ArrayList<AbstractJobVertex>(Arrays.asList(v2));
   
    try {
      eg.attachJobGraph(ordered2);
View Full Code Here

  public void testCannotConnectWrongOrder() {
    final JobID jobId = new JobID();
    final String jobName = "Test Job Sample Name";
    final Configuration cfg = new Configuration();
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
    AbstractJobVertex v3 = new AbstractJobVertex("vertex3");
    AbstractJobVertex v4 = new AbstractJobVertex("vertex4");
    AbstractJobVertex v5 = new AbstractJobVertex("vertex5");
   
    v1.setParallelism(5);
    v2.setParallelism(7);
    v3.setParallelism(2);
    v4.setParallelism(11);
    v5.setParallelism(4);
   
    v2.connectNewDataSetAsInput(v1, DistributionPattern.BIPARTITE);
    v4.connectNewDataSetAsInput(v2, DistributionPattern.BIPARTITE);
    v4.connectNewDataSetAsInput(v3, DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v4, DistributionPattern.BIPARTITE);
    v5.connectNewDataSetAsInput(v3, DistributionPattern.BIPARTITE);
   
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2, v3, v5, v4));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
View Full Code Here

TOP

Related Classes of org.apache.flink.runtime.jobgraph.AbstractJobVertex

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.