Package org.apache.flink.runtime.jobgraph

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex
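The test snippets below all reference jobId, jobName, and cfg without declaring them; in the original test class these are shared fixture fields. A minimal sketch of such a fixture (the exact declarations are an assumption, and the import paths for JobID and Configuration differ across Flink versions):

  // Shared fixture assumed by the test snippets below (names taken from the
  // snippets; the concrete declarations are an assumption, not shown here).
  private final JobID jobId = new JobID();
  private final String jobName = "Pointwise Pattern Test Job";
  private final Configuration cfg = new Configuration();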


 
  @Test
  public void testNToN() {
    final int N = 23;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // With equal parallelism on both sides, POINTWISE wires each consumer
      // subtask i to exactly one producer partition: partition i.
      assertEquals(1, inEdges.length);
      assertEquals(ev.getParallelSubtaskIndex(), inEdges[0].getSource().getPartitionNumber());
    }
  }


 
  @Test
  public void test2NToN() {
    final int N = 17;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(2 * N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // The producer runs with twice the consumer's parallelism, so each
      // consumer subtask i reads the two producer partitions 2*i and 2*i + 1.
      assertEquals(2, inEdges.length);
      assertEquals(2 * ev.getParallelSubtaskIndex(), inEdges[0].getSource().getPartitionNumber());
      assertEquals(2 * ev.getParallelSubtaskIndex() + 1, inEdges[1].getSource().getPartitionNumber());
    }
  }

 
  @Test
  public void test3NToN() {
    final int N = 17;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(3 * N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // Three producer partitions per consumer subtask: 3*i, 3*i + 1, 3*i + 2.
      assertEquals(3, inEdges.length);
      assertEquals(3 * ev.getParallelSubtaskIndex(), inEdges[0].getSource().getPartitionNumber());
      assertEquals(3 * ev.getParallelSubtaskIndex() + 1, inEdges[1].getSource().getPartitionNumber());
      assertEquals(3 * ev.getParallelSubtaskIndex() + 2, inEdges[2].getSource().getPartitionNumber());
    }
  }

 
  @Test
  public void testNTo2N() {
    final int N = 41;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(2 * N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // The consumer runs with twice the producer's parallelism, so each
      // consumer subtask i reads exactly one partition, number i / 2.
      assertEquals(1, inEdges.length);
      assertEquals(ev.getParallelSubtaskIndex() / 2, inEdges[0].getSource().getPartitionNumber());
    }
  }

 
  @Test
  public void testNTo7N() {
    final int N = 11;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(7 * N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // Seven consumer subtasks share each producer partition: subtask i
      // reads partition i / 7.
      assertEquals(1, inEdges.length);
      assertEquals(ev.getParallelSubtaskIndex() / 7, inEdges[0].getSource().getPartitionNumber());
    }
  }

  // Helper (excerpt): connects a producer with parallelism lowDop to a consumer
  // with parallelism highDop and checks how evenly POINTWISE distributes the
  // connections. The signature and argument check below are reconstructed and
  // are assumptions; only the method body survived in this excerpt.
  private void testLowToHigh(int lowDop, int highDop) {
    if (highDop < lowDop) {
      throw new IllegalArgumentException();
    }
   
    final int factor = highDop / lowDop;
    final int delta = highDop % lowDop == 0 ? 0 : 1;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(lowDop);
    v2.setParallelism(highDop);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    int[] timesUsed = new int[lowDop];
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      assertEquals(1, inEdges.length);
     
      timesUsed[inEdges[0].getSource().getPartitionNumber()]++;
    }
   
    // Every producer partition must be read either factor or factor + delta times.
    for (int used : timesUsed) {
      assertTrue(used >= factor && used <= factor + delta);
    }
  }

  // Helper (excerpt): the mirror case, a producer with parallelism highDop
  // feeding a consumer with parallelism lowDop. As above, the signature and
  // argument check are reconstructed assumptions.
  private void testHighToLow(int highDop, int lowDop) {
    if (highDop < lowDop) {
      throw new IllegalArgumentException();
    }
   
    final int factor = highDop / lowDop;
    final int delta = highDop % lowDop == 0 ? 0 : 1;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(highDop);
    v2.setParallelism(lowDop);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    int[] timesUsed = new int[highDop];
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
      // Each consumer subtask reads factor or factor + delta partitions ...
      assertTrue(inEdges.length >= factor && inEdges.length <= factor + delta);
     
      for (ExecutionEdge ee : inEdges) {
        timesUsed[ee.getSource().getPartitionNumber()]++;
      }
    }
   
    // ... and every producer partition is read exactly once.
    for (int used : timesUsed) {
      assertEquals(1, used);
    }
  }

    // From a streaming job-graph generator (excerpt): building one
    // AbstractJobVertex per stream operator and wiring up its StreamConfig.
    int parallelism = vertexParallelism.get(vertexName);
    byte[] outputSelector = outputSelectors.get(vertexName);
    Map<String, OperatorState<?>> state = operatorStates.get(vertexName);

    // Create vertex object
    AbstractJobVertex vertex = new AbstractJobVertex(vertexName);

    this.jobGraph.addVertex(vertex);

    vertex.setInvokableClass(vertexClass);
    vertex.setParallelism(parallelism);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Parallelism set: {} for {}", parallelism, vertexName);
    }

    StreamConfig config = new StreamConfig(vertex.getConfiguration());

    config.setMutability(mutability.get(vertexName));
    config.setBufferTimeout(bufferTimeout.get(vertexName));

    config.setTypeWrapperIn1(typeWrapperIn1.get(vertexName));
    // ... (further StreamConfig settings omitted in this excerpt)

  /**
   * Connects two stream vertices with the given partitioner.
   *
   * @param upStreamVertexName
   *            Name of the upstream vertex
   * @param downStreamVertexName
   *            Name of the downstream vertex
   * @param partitionerObject
   *            The partitioner
   */
  private <T> void connect(String upStreamVertexName, String downStreamVertexName,
      StreamPartitioner<T> partitionerObject) {

    AbstractJobVertex upStreamVertex = streamVertices.get(upStreamVertexName);
    AbstractJobVertex downStreamVertex = streamVertices.get(downStreamVertexName);

    StreamConfig config = new StreamConfig(upStreamVertex.getConfiguration());

    if (partitionerObject.getClass().equals(ForwardPartitioner.class)) {
      downStreamVertex
          .connectNewDataSetAsInput(upStreamVertex, DistributionPattern.POINTWISE);
    } else {
      downStreamVertex
          .connectNewDataSetAsInput(upStreamVertex, DistributionPattern.BIPARTITE);
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("CONNECTED: {} - {} -> {}", partitionerObject.getClass().getSimpleName(),
          upStreamVertexName, downStreamVertexName);
    }
  }

      vertex.setSlotSharingGroup(shareGroup);
    }

    // Co-locate the head and tail vertex of every iteration, so feedback
    // records do not have to travel across the network.
    for (String iterID : new HashSet<String>(iterationIds.values())) {
      CoLocationGroup ccg = new CoLocationGroup();
      AbstractJobVertex tail = streamVertices.get(iterationIDtoTailName.get(iterID));
      AbstractJobVertex head = streamVertices.get(iterationIDtoHeadName.get(iterID));

      ccg.addVertex(head);
      ccg.addVertex(tail);
    }
  }
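Taken together, the snippets show the core AbstractJobVertex workflow: create a vertex, set its invokable class and parallelism, connect it to its inputs, and add it to a JobGraph. A minimal self-contained sketch of that workflow (the JobGraph constructor shape and import paths are assumptions tied to the pre-1.0 Flink API these excerpts use):

  import org.apache.flink.runtime.jobgraph.AbstractJobVertex;
  import org.apache.flink.runtime.jobgraph.DistributionPattern;
  import org.apache.flink.runtime.jobgraph.JobGraph;

  public class JobGraphSketch {
    public static void main(String[] args) {
      // A source with parallelism 4 feeding a sink with parallelism 2.
      AbstractJobVertex source = new AbstractJobVertex("source");
      AbstractJobVertex sink = new AbstractJobVertex("sink");
      source.setParallelism(4);
      sink.setParallelism(2);
      // A runnable job would also set an invokable task class on each vertex,
      // as the generator snippet above does with vertex.setInvokableClass(...).

      // POINTWISE keeps the wiring local (each sink subtask reads two source
      // partitions here); BIPARTITE would connect every producer to every consumer.
      sink.connectNewDataSetAsInput(source, DistributionPattern.POINTWISE);

      JobGraph jobGraph = new JobGraph("sketch job", source, sink);
      System.out.println("vertices in graph: " + jobGraph.getNumberOfVertices());
    }
  }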
