Package org.apache.flink.runtime.operators.util

Examples of org.apache.flink.runtime.operators.util.TaskConfig$DelegatingConfiguration
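
The snippets below all follow the same pattern: a TaskConfig is constructed around a job vertex's Configuration, and every setter writes its value into that underlying Configuration, so the settings travel with the vertex to the task at runtime. The nested TaskConfig.DelegatingConfiguration named in the page title is, in the 0.x sources, a key-prefixed view onto that same Configuration, used among other things for the stub parameters set via setStubParameters. Below is a minimal sketch of the pattern, assuming the pre-1.0 Flink runtime API used in these examples; the class and method names are taken from the snippets on this page, but the import paths are a best guess at the 0.x package layout and may differ between versions.

import org.apache.flink.api.common.typeutils.TypeSerializerFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.AbstractJobVertex;
import org.apache.flink.runtime.operators.DataSourceTask;
import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
import org.apache.flink.runtime.operators.util.TaskConfig;

public class TaskConfigPattern {

  public static AbstractJobVertex configureExampleSource(TypeSerializerFactory<?> serializer) {
    AbstractJobVertex vertex = new AbstractJobVertex("Example Source");
    vertex.setInvokableClass(DataSourceTask.class);

    // TaskConfig holds no state of its own: each setter below writes a key/value
    // pair into the vertex's Configuration, so the settings ship with the vertex.
    TaskConfig config = new TaskConfig(vertex.getConfiguration());

    // stub parameters are stored under a key prefix and read back through
    // TaskConfig.DelegatingConfiguration, a prefixed view of the same Configuration
    config.setStubParameters(new Configuration());

    config.setOutputSerializer(serializer);
    config.addOutputShipStrategy(ShipStrategyType.FORWARD);

    return vertex;
  }
}

The examples that follow apply this pattern to data sinks, chained mappers, iteration heads and tails, and the iteration synchronization task; only the set of setters varies.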


    return vertex;
  }

  private AbstractJobVertex createDataSinkVertex(SinkPlanNode node) throws CompilerException {
    final OutputFormatVertex vertex = new OutputFormatVertex(node.getNodeName());
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setInvokableClass(DataSinkTask.class);
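    // note that the degree of parallelism is written directly into the vertex
    // configuration here, not through the TaskConfig wrapper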
    vertex.getConfiguration().setInteger(DataSinkTask.DEGREE_OF_PARALLELISM_KEY, node.getDegreeOfParallelism());

    // set user code
    config.setStubWrapper(node.getPactContract().getUserCodeWrapper());
    config.setStubParameters(node.getPactContract().getParameters());

    return vertex;
  }


    }
   
    // create or adopt the head vertex
    final AbstractJobVertex toReturn;
    final AbstractJobVertex headVertex;
    final TaskConfig headConfig;
    if (merge) {
      final PlanNode successor = pspn.getOutgoingChannels().get(0).getTarget();
      headVertex = (AbstractJobVertex) this.vertices.get(successor);
     
      if (headVertex == null) {
        throw new CompilerException(
          "Bug: Trying to merge solution set with its successor, but successor has not been created.");
      }
     
      // reset the vertex type to iteration head
      headVertex.setInvokableClass(IterationHeadPactTask.class);
      headConfig = new TaskConfig(headVertex.getConfiguration());
      toReturn = null;
    } else {
      // instantiate the head vertex and give it a no-op driver as the driver strategy.
      // everything else happens in the post visit, after the input (the initial partial solution)
      // is connected.
      headVertex = new AbstractJobVertex("PartialSolution ("+iteration.getNodeName()+")");
      headVertex.setInvokableClass(IterationHeadPactTask.class);
      headConfig = new TaskConfig(headVertex.getConfiguration());
      headConfig.setDriver(NoOpDriver.class);
      toReturn = headVertex;
    }
   
    // retrieve the iteration descriptor and register the head task with it
    IterationDescriptor descr = this.iterations.get(iteration);

    }
   
    // create or adopt the head vertex
    final AbstractJobVertex toReturn;
    final AbstractJobVertex headVertex;
    final TaskConfig headConfig;
    if (merge) {
      final PlanNode successor = wspn.getOutgoingChannels().get(0).getTarget();
      headVertex = (AbstractJobVertex) this.vertices.get(successor);
     
      if (headVertex == null) {
        throw new CompilerException(
          "Bug: Trying to merge solution set with its successor, but successor has not been created.");
      }
     
      // reset the vertex type to iteration head
      headVertex.setInvokableClass(IterationHeadPactTask.class);
      headConfig = new TaskConfig(headVertex.getConfiguration());
      toReturn = null;
    } else {
      // instantiate the head vertex and give it a no-op driver as the driver strategy.
      // everything else happens in the post visit, after the input (the initial partial solution)
      // is connected.
      headVertex = new AbstractJobVertex("IterationHead("+iteration.getNodeName()+")");
      headVertex.setInvokableClass(IterationHeadPactTask.class);
      headConfig = new TaskConfig(headVertex.getConfiguration());
      headConfig.setDriver(NoOpDriver.class);
      toReturn = headVertex;
    }
   
    headConfig.setSolutionSetUnmanaged(iteration.getIterationNode().getIterationContract().isSolutionSetUnManaged());
   
    // retrieve the iteration descriptor and register the head task with it
    IterationDescriptor descr = this.iterations.get(iteration);
    if (descr == null) {
      throw new CompilerException("Bug: Iteration descriptor was not created when translating the iteration node.");

  {
    @SuppressWarnings("unchecked")
    CsvInputFormat verticesInFormat = new CsvInputFormat(' ', LongValue.class);
    InputFormatVertex verticesInput = JobGraphUtils.createInput(verticesInFormat, verticesPath, "VerticesInput",
      jobGraph, numSubTasks);
    TaskConfig verticesInputConfig = new TaskConfig(verticesInput.getConfiguration());
    {
      verticesInputConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
      verticesInputConfig.setOutputSerializer(serializer);

      // chained mapper that duplicates the id
      TaskConfig chainedMapperConfig = new TaskConfig(new Configuration());
      chainedMapperConfig.setStubWrapper(new UserCodeClassWrapper<IdDuplicator>(IdDuplicator.class));
      chainedMapperConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
      chainedMapperConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
      chainedMapperConfig.setInputSerializer(serializer, 0);

      chainedMapperConfig.setOutputSerializer(serializer);
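      // the chained mapper has two outputs; both are hash-partitioned and each
      // gets its own output comparator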
      chainedMapperConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
      chainedMapperConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
      chainedMapperConfig.setOutputComparator(comparator, 0);
      chainedMapperConfig.setOutputComparator(comparator, 1);

      verticesInputConfig.addChainedTask(ChainedCollectorMapDriver.class, chainedMapperConfig, "ID Duplicator");
    }

    return verticesInput;

    // edges
    @SuppressWarnings("unchecked")
    CsvInputFormat edgesInFormat = new CsvInputFormat(' ', LongValue.class, LongValue.class);
    InputFormatVertex edgesInput = JobGraphUtils.createInput(edgesInFormat, edgesPath, "EdgesInput", jobGraph,
      numSubTasks);
    TaskConfig edgesInputConfig = new TaskConfig(edgesInput.getConfiguration());
    {
      edgesInputConfig.setOutputSerializer(serializer);
      edgesInputConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
      edgesInputConfig.setOutputComparator(comparator, 0);
    }

    return edgesInput;
  }

      TypeSerializerFactory<?> serializer,
      TypeComparatorFactory<?> comparator,
      TypePairComparatorFactory<?, ?> pairComparator) {

    AbstractJobVertex head = JobGraphUtils.createTask(IterationHeadPactTask.class, "Join With Edges (Iteration Head)", jobGraph, numSubTasks);
    TaskConfig headConfig = new TaskConfig(head.getConfiguration());
    {
      headConfig.setIterationId(ITERATION_ID);

      // initial input / workset
      headConfig.addInputToGroup(0);
      headConfig.setInputSerializer(serializer, 0);
      headConfig.setInputComparator(comparator, 0);
      headConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
      headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);

      // regular plan input (second input to the join)
      headConfig.addInputToGroup(1);
      headConfig.setInputSerializer(serializer, 1);
      headConfig.setInputComparator(comparator, 1);
      headConfig.setInputLocalStrategy(1, LocalStrategy.NONE);
      headConfig.setInputCached(1, true);
      headConfig.setRelativeInputMaterializationMemory(1, MEM_FRAC_PER_CONSUMER);

      // initial solution set input
      headConfig.addInputToGroup(2);
      headConfig.setInputSerializer(serializer, 2);
      headConfig.setInputComparator(comparator, 2);
      headConfig.setInputLocalStrategy(2, LocalStrategy.NONE);
      headConfig.setIterationHeadSolutionSetInputIndex(2);

      headConfig.setSolutionSetSerializer(serializer);
      headConfig.setSolutionSetComparator(comparator);

      // back channel / iterations
      headConfig.setIsWorksetIteration();
      headConfig.setRelativeBackChannelMemory(MEM_FRAC_PER_CONSUMER);
      headConfig.setRelativeSolutionSetMemory(MEM_FRAC_PER_CONSUMER);

      // output into iteration
      headConfig.setOutputSerializer(serializer);
      headConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
      headConfig.setOutputComparator(comparator, 0);

      // final output
      TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
      headFinalOutConfig.setOutputSerializer(serializer);
      headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
      headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);

      // the sync
      headConfig.setIterationHeadIndexOfSyncOutput(2);

    graph.addVertex(inputVertex);
   
    inputVertex.setInvokableClass(DataSourceTask.class);
    inputVertex.setParallelism(degreeOfParallelism);

    TaskConfig inputConfig = new TaskConfig(inputVertex.getConfiguration());
    inputConfig.setStubWrapper(stub);
   
    return inputVertex;
  }

    jobGraph.addVertex(sync);
   
    sync.setInvokableClass(IterationSynchronizationSinkTask.class);
    sync.setParallelism(1);
   
    TaskConfig syncConfig = new TaskConfig(sync.getConfiguration());
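    // input gate 0 is configured as iterative: the sync waits for 'parallelism' events
    // (presumably one per parallel head subtask) before each superstep transition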
    syncConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, parallelism);
    return sync;
  }

 
  private void finalizeBulkIteration(IterationDescriptor descr) {
   
    final BulkIterationPlanNode bulkNode = (BulkIterationPlanNode) descr.getIterationNode();
    final AbstractJobVertex headVertex = descr.getHeadTask();
    final TaskConfig headConfig = new TaskConfig(headVertex.getConfiguration());
    final TaskConfig headFinalOutputConfig = descr.getHeadFinalResultConfig();
   
    // ------------ finalize the head config with the final outputs and the sync gate ------------
    final int numStepFunctionOuts = headConfig.getNumOutputs();
    final int numFinalOuts = headFinalOutputConfig.getNumOutputs();
   
    if (numStepFunctionOuts == 0) {
      throw new CompilerException("The iteration has no operation inside the step function.");
    }
   
    headConfig.setIterationHeadFinalOutputConfig(headFinalOutputConfig);
    headConfig.setIterationHeadIndexOfSyncOutput(numStepFunctionOuts + numFinalOuts);
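    // the sync output gets the next free index, after the step function outputs and the final outputs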
    final double relativeMemForBackChannel = bulkNode.getRelativeMemoryPerSubTask();
    if (relativeMemForBackChannel <= 0) {
      throw new CompilerException("Bug: No memory has been assigned to the iteration back channel.");
    }
    headConfig.setRelativeBackChannelMemory(relativeMemForBackChannel);
   
    // --------------------------- create the sync task ---------------------------
    final AbstractJobVertex sync = new AbstractJobVertex("Sync(" + bulkNode.getNodeName() + ")");
    sync.setInvokableClass(IterationSynchronizationSinkTask.class);
    sync.setParallelism(1);
    this.auxVertices.add(sync);
   
    final TaskConfig syncConfig = new TaskConfig(sync.getConfiguration());
    syncConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, headVertex.getParallelism());

    // set the number of iteration / convergence criterion for the sync
    final int maxNumIterations = bulkNode.getIterationNode().getIterationContract().getMaximumNumberOfIterations();
    if (maxNumIterations < 1) {
      throw new CompilerException("Cannot create bulk iteration with unspecified maximum number of iterations.");
    }
    syncConfig.setNumberOfIterations(maxNumIterations);
   
    // connect the sync task
    sync.connectNewDataSetAsInput(headVertex, DistributionPattern.POINTWISE);
   
    // ----------------------------- create the iteration tail ------------------------------
   
    final PlanNode rootOfTerminationCriterion = bulkNode.getRootOfTerminationCriterion();
    final PlanNode rootOfStepFunction = bulkNode.getRootOfStepFunction();
    final TaskConfig tailConfig;
   
    AbstractJobVertex rootOfStepFunctionVertex = (AbstractJobVertex) this.vertices.get(rootOfStepFunction);
    if (rootOfStepFunctionVertex == null) {
      // last op is chained
      final TaskInChain taskInChain = this.chainedTasks.get(rootOfStepFunction);
      if (taskInChain == null) {
        throw new CompilerException("Bug: Tail of step function not found as vertex or chained task.");
      }
      rootOfStepFunctionVertex = (AbstractJobVertex) taskInChain.getContainingVertex();

      // the fake channel is statically typed to pact record. no data is sent over this channel anyway.
      tailConfig = taskInChain.getTaskConfig();
    } else {
      tailConfig = new TaskConfig(rootOfStepFunctionVertex.getConfiguration());
    }
   
    tailConfig.setIsWorksetUpdate();
   
    // No following termination criterion
    if (rootOfStepFunction.getOutgoingChannels().isEmpty()) {
     
      rootOfStepFunctionVertex.setInvokableClass(IterationTailPactTask.class);
     
      tailConfig.setOutputSerializer(bulkNode.getSerializerForIterationChannel());
    }
   
   
    // create the fake output task for termination criterion, if needed
    final TaskConfig tailConfigOfTerminationCriterion;
    // If we have a termination criterion and it is not an intermediate node
    if(rootOfTerminationCriterion != null && rootOfTerminationCriterion.getOutgoingChannels().isEmpty()) {
      AbstractJobVertex rootOfTerminationCriterionVertex = (AbstractJobVertex) this.vertices.get(rootOfTerminationCriterion);
     
     
      if (rootOfTerminationCriterionVertex == null) {
        // last op is chained
        final TaskInChain taskInChain = this.chainedTasks.get(rootOfTerminationCriterion);
        if (taskInChain == null) {
          throw new CompilerException("Bug: Tail of termination criterion not found as vertex or chained task.");
        }
        rootOfTerminationCriterionVertex = (AbstractJobVertex) taskInChain.getContainingVertex();

        // the fake channel is statically typed to pact record. no data is sent over this channel anyway.
        tailConfigOfTerminationCriterion = taskInChain.getTaskConfig();
      } else {
        tailConfigOfTerminationCriterion = new TaskConfig(rootOfTerminationCriterionVertex.getConfiguration());
      }
     
      rootOfTerminationCriterionVertex.setInvokableClass(IterationTailPactTask.class);
      // Hack
      tailConfigOfTerminationCriterion.setIsSolutionSetUpdate();
      tailConfigOfTerminationCriterion.setOutputSerializer(bulkNode.getSerializerForIterationChannel());
     
      // tell the head that it needs to wait for the solution set updates
      headConfig.setWaitForSolutionSetUpdate();
    }
   

  }
 
  private void finalizeWorksetIteration(IterationDescriptor descr) {
    final WorksetIterationPlanNode iterNode = (WorksetIterationPlanNode) descr.getIterationNode();
    final AbstractJobVertex headVertex = descr.getHeadTask();
    final TaskConfig headConfig = new TaskConfig(headVertex.getConfiguration());
    final TaskConfig headFinalOutputConfig = descr.getHeadFinalResultConfig();
   
    // ------------ finalize the head config with the final outputs and the sync gate ------------
    {
      final int numStepFunctionOuts = headConfig.getNumOutputs();
      final int numFinalOuts = headFinalOutputConfig.getNumOutputs();
     
      if (numStepFunctionOuts == 0) {
        throw new CompilerException("The workset iteration has no operation on the workset inside the step function.");
      }
     
      headConfig.setIterationHeadFinalOutputConfig(headFinalOutputConfig);
      headConfig.setIterationHeadIndexOfSyncOutput(numStepFunctionOuts + numFinalOuts);
      final double relativeMemory = iterNode.getRelativeMemoryPerSubTask();
      if (relativeMemory <= 0) {
        throw new CompilerException("Bug: No memory has been assigned to the workset iteration.");
      }
     
      headConfig.setIsWorksetIteration();
      headConfig.setRelativeBackChannelMemory(relativeMemory / 2);
      headConfig.setRelativeSolutionSetMemory(relativeMemory / 2);
     
      // set the solution set serializer and comparator
      headConfig.setSolutionSetSerializer(iterNode.getSolutionSetSerializer());
      headConfig.setSolutionSetComparator(iterNode.getSolutionSetComparator());
    }
   
    // --------------------------- create the sync task ---------------------------
    final TaskConfig syncConfig;
    {
      final AbstractJobVertex sync = new AbstractJobVertex("Sync (" + iterNode.getNodeName() + ")");
      sync.setInvokableClass(IterationSynchronizationSinkTask.class);
      sync.setParallelism(1);
      this.auxVertices.add(sync);
     
      syncConfig = new TaskConfig(sync.getConfiguration());
      syncConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, headVertex.getParallelism());
 
      // set the number of iteration / convergence criterion for the sync
      final int maxNumIterations = iterNode.getIterationNode().getIterationContract().getMaximumNumberOfIterations();
      if (maxNumIterations < 1) {
        throw new CompilerException("Cannot create workset iteration with unspecified maximum number of iterations.");
      }
      syncConfig.setNumberOfIterations(maxNumIterations);
     
      // connect the sync task
      sync.connectNewDataSetAsInput(headVertex, DistributionPattern.POINTWISE);
    }
   
    // ----------------------------- create the iteration tails -----------------------------
    // ----------------------- for next workset and solution set delta -----------------------

    {
      // we have three possible cases:
      // 1) Two tails, one for workset update, one for solution set update
      // 2) One tail for workset update, solution set update happens in an intermediate task
      // 3) One tail for solution set update, workset update happens in an intermediate task
     
      final PlanNode nextWorksetNode = iterNode.getNextWorkSetPlanNode();
      final PlanNode solutionDeltaNode = iterNode.getSolutionSetDeltaPlanNode();
     
      final boolean hasWorksetTail = nextWorksetNode.getOutgoingChannels().isEmpty();
      final boolean hasSolutionSetTail = (!iterNode.isImmediateSolutionSetUpdate()) || (!hasWorksetTail);
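      // no dedicated solution set tail is needed only when the update is immediate
      // and can piggyback on the workset tail (case 2 above)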
     
      {
        // get the vertex for the workset update
        final TaskConfig worksetTailConfig;
        AbstractJobVertex nextWorksetVertex = (AbstractJobVertex) this.vertices.get(nextWorksetNode);
        if (nextWorksetVertex == null) {
          // nextWorksetVertex is chained
          TaskInChain taskInChain = this.chainedTasks.get(nextWorksetNode);
          if (taskInChain == null) {
            throw new CompilerException("Bug: Next workset node not found as vertex or chained task.");
          }
          nextWorksetVertex = (AbstractJobVertex) taskInChain.getContainingVertex();
          worksetTailConfig = taskInChain.getTaskConfig();
        } else {
          worksetTailConfig = new TaskConfig(nextWorksetVertex.getConfiguration());
        }
       
        // mark the node to perform workset updates
        worksetTailConfig.setIsWorksetIteration();
        worksetTailConfig.setIsWorksetUpdate();
       
        if (hasWorksetTail) {
          nextWorksetVertex.setInvokableClass(IterationTailPactTask.class);
         
          worksetTailConfig.setOutputSerializer(iterNode.getWorksetSerializer());
        }
      }
      {
        final TaskConfig solutionDeltaConfig;
        AbstractJobVertex solutionDeltaVertex = (AbstractJobVertex) this.vertices.get(solutionDeltaNode);
        if (solutionDeltaVertex == null) {
          // last op is chained
          TaskInChain taskInChain = this.chainedTasks.get(solutionDeltaNode);
          if (taskInChain == null) {
            throw new CompilerException("Bug: Solution Set Delta not found as vertex or chained task.");
          }
          solutionDeltaVertex = (AbstractJobVertex) taskInChain.getContainingVertex();
          solutionDeltaConfig = taskInChain.getTaskConfig();
        } else {
          solutionDeltaConfig = new TaskConfig(solutionDeltaVertex.getConfiguration());
        }
       
        solutionDeltaConfig.setIsWorksetIteration();
        solutionDeltaConfig.setIsSolutionSetUpdate();
       
        if (hasSolutionSetTail) {
          solutionDeltaVertex.setInvokableClass(IterationTailPactTask.class);
         
          solutionDeltaConfig.setOutputSerializer(iterNode.getSolutionSetSerializer());
         
          // tell the head that it needs to wait for the solution set updates
          headConfig.setWaitForSolutionSetUpdate();
        }
        else {
          // no tail, intermediate update. must be immediate update
          if (!iterNode.isImmediateSolutionSetUpdate()) {
            throw new CompilerException("A solution set update without a dedicated tail is not set to perform immediate updates.");
          }
          solutionDeltaConfig.setIsSolutionSetUpdateWithoutReprobe();
        }
      }
    }
   
    // ------------------- register the aggregators -------------------
