Package eu.stratosphere.nephele.jobgraph

Examples of eu.stratosphere.nephele.jobgraph.AbstractJobVertex


    // Fragment: after all group vertices have been created, wire up the
    // "share instances with" relationships between execution group vertices
    // based on the constraints declared on the corresponding job vertices.
    final Iterator<Map.Entry<AbstractJobVertex, ExecutionGroupVertex>> it = temporaryGroupVertexMap.entrySet()
      .iterator();
    while (it.hasNext()) {

      final Map.Entry<AbstractJobVertex, ExecutionGroupVertex> entry = it.next();
      final AbstractJobVertex jobVertex = entry.getKey();
      // Only job vertices that explicitly request co-location need wiring.
      if (jobVertex.getVertexToShareInstancesWith() != null) {

        final AbstractJobVertex vertexToShareInstancesWith = jobVertex.getVertexToShareInstancesWith();
        final ExecutionGroupVertex groupVertex = entry.getValue();
        // NOTE(review): the lookup can return null if the referenced vertex is
        // missing from temporaryGroupVertexMap — confirm shareInstancesWith
        // tolerates or rejects a null argument upstream.
        final ExecutionGroupVertex groupVertexToShareInstancesWith = temporaryGroupVertexMap
          .get(vertexToShareInstancesWith);
        groupVertex.shareInstancesWith(groupVertexToShareInstancesWith);
      }
View Full Code Here


    // Fragment: verify that each job vertex and its execution counterpart agree
    // on gate counts, then begin building group edges for forward connections.
    Iterator<Map.Entry<AbstractJobVertex, ExecutionVertex>> it = vertexMap.entrySet().iterator();

    while (it.hasNext()) {

      final Map.Entry<AbstractJobVertex, ExecutionVertex> entry = it.next();
      final AbstractJobVertex sjv = entry.getKey();
      final ExecutionVertex sev = entry.getValue();
      final ExecutionGroupVertex sgv = sev.getGroupVertex();

      // First compare number of output gates
      if (sjv.getNumberOfForwardConnections() != sgv.getEnvironment().getNumberOfOutputGates()) {
        throw new GraphConversionException("Job and execution vertex " + sjv.getName()
          + " have different number of outputs");
      }

      // Then compare number of input gates
      if (sjv.getNumberOfBackwardConnections() != sgv.getEnvironment().getNumberOfInputGates()) {
        throw new GraphConversionException("Job and execution vertex " + sjv.getName()
          + " have different number of inputs");
      }

      // First, build the group edges
      for (int i = 0; i < sjv.getNumberOfForwardConnections(); ++i) {
        final JobEdge edge = sjv.getForwardConnection(i);
        final AbstractJobVertex tjv = edge.getConnectedVertex();

        // NOTE(review): vertexMap.get(tjv) is dereferenced without a null
        // check — presumably every connected vertex was registered earlier;
        // verify against the surrounding conversion logic.
        final ExecutionVertex tev = vertexMap.get(tjv);
        final ExecutionGroupVertex tgv = tev.getGroupVertex();
        // Use NETWORK as default channel type if nothing else is defined by the user
        ChannelType channelType = edge.getChannelType();
View Full Code Here

      // Fragment: structural validation of a submitted job graph. Each check
      // short-circuits with an ERROR JobSubmissionResult describing the first
      // violation found; a debug line is logged after each passed check.
      if (LOG.isDebugEnabled()) {
        LOG.debug("Submitted job " + job.getName() + " is not null");
      }
 
      // Check if any vertex of the graph has null edges
      AbstractJobVertex jv = job.findVertexWithNullEdges();
      if (jv != null) {
        JobSubmissionResult result = new JobSubmissionResult(AbstractJobResult.ReturnCode.ERROR, "Vertex "
          + jv.getName() + " has at least one null edge");
        return result;
      }
 
      if (LOG.isDebugEnabled()) {
        LOG.debug("Submitted job " + job.getName() + " has no null edges");
      }
 
      // Next, check if the graph is weakly connected
      if (!job.isWeaklyConnected()) {
        JobSubmissionResult result = new JobSubmissionResult(AbstractJobResult.ReturnCode.ERROR,
          "Job graph is not weakly connected");
        return result;
      }
 
      if (LOG.isDebugEnabled()) {
        LOG.debug("The graph of job " + job.getName() + " is weakly connected");
      }
 
      // Check if job graph has cycles
      if (!job.isAcyclic()) {
        JobSubmissionResult result = new JobSubmissionResult(AbstractJobResult.ReturnCode.ERROR,
          "Job graph is not a DAG");
        return result;
      }
 
      if (LOG.isDebugEnabled()) {
        LOG.debug("The graph of job " + job.getName() + " is acyclic");
      }
 
      // Check constrains on degree
      // (areVertexDegreesCorrect returns the first offending vertex, or null)
      jv = job.areVertexDegreesCorrect();
      if (jv != null) {
        JobSubmissionResult result = new JobSubmissionResult(AbstractJobResult.ReturnCode.ERROR,
          "Degree of vertex " + jv.getName() + " is incorrect");
        return result;
      }
 
      if (LOG.isDebugEnabled()) {
        LOG.debug("All vertices of job " + job.getName() + " have the correct degree");
View Full Code Here

      // return false to prevent further descend
      return false;
    }

    // Fragment: map each optimizer plan node to a job vertex (or null when the
    // node is merged/deferred, e.g. iterations and unions handled elsewhere).
    // the vertex to be created for the current node
    final AbstractJobVertex vertex;
    try {
      if (node instanceof SinkPlanNode) {
        vertex = createDataSinkVertex((SinkPlanNode) node);
      }
      else if (node instanceof SourcePlanNode) {
        vertex = createDataSourceVertex((SourcePlanNode) node);
      }
      else if (node instanceof BulkIterationPlanNode) {
        BulkIterationPlanNode iterationNode = (BulkIterationPlanNode) node;
        // for the bulk iteration, we skip creating anything for now. we create the graph
        // for the step function in the post visit.
       
        // check that the root of the step function has the same DOP as the iteration.
        // because the tail must have the same DOP as the head, we can only merge the last
        // operator with the tail, if they have the same DOP. not merging is currently not
        // implemented
        PlanNode root = iterationNode.getRootOfStepFunction();
        if (root.getDegreeOfParallelism() != node.getDegreeOfParallelism() ||
            root.getSubtasksPerInstance() != node.getSubtasksPerInstance())
        {
          throw new CompilerException("Error: The final operator of the step " +
              "function has a different degree of parallelism than the iteration operator itself.");
        }
       
        // register the iteration under a fresh numeric id; the actual vertices
        // are created later in the post visit
        IterationDescriptor descr = new IterationDescriptor(iterationNode, this.iterationIdEnumerator++);
        this.iterations.put(iterationNode, descr);
        vertex = null;
      }
      else if (node instanceof WorksetIterationPlanNode) {
        WorksetIterationPlanNode iterationNode = (WorksetIterationPlanNode) node;

        // we have the same constraints as for the bulk iteration
        PlanNode nextWorkSet = iterationNode.getNextWorkSetPlanNode();
        PlanNode solutionSetDelta  = iterationNode.getSolutionSetDeltaPlanNode();
       
        if (nextWorkSet.getDegreeOfParallelism() != node.getDegreeOfParallelism() ||
          nextWorkSet.getSubtasksPerInstance() != node.getSubtasksPerInstance())
        {
          throw new CompilerException("It is currently not supported that the final operator of the step " +
              "function has a different degree of parallelism than the iteration operator itself.");
        }
        if (solutionSetDelta.getDegreeOfParallelism() != node.getDegreeOfParallelism() ||
          solutionSetDelta.getSubtasksPerInstance() != node.getSubtasksPerInstance())
        {
          throw new CompilerException("It is currently not supported that the final operator of the step " +
              "function has a different degree of parallelism than the iteration operator itself.");
        }
       
        IterationDescriptor descr = new IterationDescriptor(iterationNode, this.iterationIdEnumerator++);
        this.iterations.put(iterationNode, descr);
        vertex = null;
      }
      else if (node instanceof SingleInputPlanNode) {
        vertex = createSingleInputVertex((SingleInputPlanNode) node);
      }
      else if (node instanceof DualInputPlanNode) {
        vertex = createDualInputVertex((DualInputPlanNode) node);
      }
      else if (node instanceof NAryUnionPlanNode) {
        // skip the union for now
        vertex = null;
      }
      else if (node instanceof BulkPartialSolutionPlanNode) {
        // create a head node (or not, if it is merged into its successor)
        vertex = createBulkIterationHead((BulkPartialSolutionPlanNode) node);
      }
      else if (node instanceof SolutionSetPlanNode) {
        // this represents an access into the solution set index.
        // we do not create a vertex for the solution set here (we create the head at the workset place holder)
       
        // we adjust the joins / cogroups that go into the solution set here
        for (Channel c : node.getOutgoingChannels()) {
          DualInputPlanNode target = (DualInputPlanNode) c.getTarget();
          AbstractJobVertex accessingVertex = this.vertices.get(target);
          TaskConfig conf = new TaskConfig(accessingVertex.getConfiguration());
          // determine which of the target's two inputs this channel feeds;
          // -1 flags a channel that matches neither input (a compiler bug)
          int inputNum = c == target.getInput1() ? 0 : c == target.getInput2() ? 1 : -1;
         
          // sanity checks
          if (inputNum == -1) {
            // NOTE(review): exception carries no message — a short description
            // of the inconsistent channel would ease debugging
            throw new CompilerException();
View Full Code Here

        // Fragment: post-visit channel translation. Connects the initial
        // solution set for workset iterations, then handles nodes whose task
        // is merged into another vertex (chained tasks).
        // inputs for initial bulk partial solution or initial workset are already connected to the iteration head in the head's post visit.
        // connect the initial solution set now.
        if (node instanceof WorksetIterationPlanNode) {
          // connect the initial solution set
          WorksetIterationPlanNode wsNode = (WorksetIterationPlanNode) node;
          AbstractJobVertex headVertex = this.iterations.get(wsNode).getHeadTask();
          TaskConfig headConfig = new TaskConfig(headVertex.getConfiguration());
          // the solution set input is appended after the driver's regular inputs
          int inputIndex = headConfig.getDriverStrategy().getNumInputs();
          headConfig.setIterationHeadSolutionSetInputIndex(inputIndex);
          translateChannel(wsNode.getInitialSolutionSetInput(), inputIndex, headVertex, headConfig, false);
        }
       
        return;
      }
     
      // --------- Main Path: Translation of channels ----------
      //
      // There are two paths of translation: One for chained tasks (or merged tasks in general),
      // which do not have their own task vertex. The other for tasks that have their own vertex,
      // or are the primary task in a vertex (to which the others are chained).
     
      final AbstractJobVertex targetVertex = this.vertices.get(node);
     
      // check whether this node has its own task, or is merged with another one
      if (targetVertex == null) {
        // node's task is merged with another task. it is either chained, of a merged head vertex
        // from an iteration
        final TaskInChain chainedTask;
        if ((chainedTask = this.chainedTasks.get(node)) != null) {
          // Chained Task. Sanity check first...
          // a chainable task must have exactly one input, with FORWARD shipping
          // and no local strategy
          final Iterator<Channel> inConns = node.getInputs();
          if (!inConns.hasNext()) {
            throw new CompilerException("Bug: Found chained task with no input.");
          }
          final Channel inConn = inConns.next();
         
          if (inConns.hasNext()) {
            throw new CompilerException("Bug: Found a chained task with more than one input!");
          }
          if (inConn.getLocalStrategy() != null && inConn.getLocalStrategy() != LocalStrategy.NONE) {
            throw new CompilerException("Bug: Found a chained task with an input local strategy.");
          }
          if (inConn.getShipStrategy() != null && inConn.getShipStrategy() != ShipStrategyType.FORWARD) {
            throw new CompilerException("Bug: Found a chained task with an input ship strategy other than FORWARD.");
          }
 
          AbstractJobVertex container = chainedTask.getContainingVertex();
         
          if (container == null) {
            // resolve the containing vertex by walking back to the source of
            // the single input connection
            final PlanNode sourceNode = inConn.getSource();
            container = this.vertices.get(sourceNode);
            if (container == null) {
              // predecessor is itself chained
              container = this.chainedTasks.get(sourceNode).getContainingVertex();
              if (container == null) {
                throw new IllegalStateException("Bug: Chained task predecessor has not been assigned its containing vertex.");
              }
            } else {
              // predecessor is a proper task job vertex and this is the first chained task. add a forward connection entry.
              new TaskConfig(container.getConfiguration()).addOutputShipStrategy(ShipStrategyType.FORWARD);
            }
            chainedTask.setContainingVertex(container);
          }
         
          // add info about the input serializer type
          chainedTask.getTaskConfig().setInputSerializer(inConn.getSerializer(), 0);
         
          // update name of container task
          // (grow an existing "CHAIN a -> b" name, or start a new one)
          String containerTaskName = container.getName();
          if(containerTaskName.startsWith("CHAIN ")) {
            container.setName(containerTaskName+" -> "+chainedTask.getTaskName());
          } else {
            container.setName("CHAIN "+containerTaskName+" -> "+chainedTask.getTaskName());
          }
         
          this.chainedTasksInSequence.add(chainedTask);
          return;
        }
View Full Code Here

      // Fragment: resolve the source vertex/config of an input channel
      // (direct vertex, chained task, or iteration head), connect it to the
      // target vertex, and account for dynamic-path channels.
      } else if (!typeSerFact.equals(inConn.getSerializer())) {
        throw new CompilerException("Conflicting types in union operator.");
      }
     
      final PlanNode sourceNode = inConn.getSource();
      AbstractJobVertex sourceVertex = this.vertices.get(sourceNode);
      TaskConfig sourceVertexConfig;

      if (sourceVertex == null) {
        // this predecessor is chained to another task or an iteration
        final TaskInChain chainedTask;
        final IterationDescriptor iteration;
        if ((chainedTask = this.chainedTasks.get(sourceNode)) != null) {
          // push chained task
          if (chainedTask.getContainingVertex() == null) {
            throw new IllegalStateException("Bug: Chained task has not been assigned its containing vertex when connecting.");
          }
          sourceVertex = chainedTask.getContainingVertex();
          sourceVertexConfig = chainedTask.getTaskConfig();
        } else if ((iteration = this.iterations.get(sourceNode)) != null) {
          // predecessor is an iteration
          sourceVertex = iteration.getHeadTask();
          sourceVertexConfig = iteration.getHeadFinalResultConfig();
        } else {
          throw new CompilerException("Bug: Could not resolve source node for a channel.");
        }
      } else {
        // predecessor is its own vertex
        sourceVertexConfig = new TaskConfig(sourceVertex.getConfiguration());
      }
      // wire the edge; the returned pattern determines fan-in for accounting
      DistributionPattern pattern = connectJobVertices(
        inConn, inputIndex, sourceVertex, sourceVertexConfig, targetVertex, targetVertexConfig, isBroadcast);
     
      // accounting on channels and senders
      numChannelsTotal++;
      if (inConn.isOnDynamicPath()) {
        numChannelsDynamicPath++;
        numDynamicSenderTasksTotal += getNumberOfSendersPerReceiver(pattern,
          sourceVertex.getNumberOfSubtasks(), targetVertex.getNumberOfSubtasks());
      }
    }
View Full Code Here

TOP

Related Classes of eu.stratosphere.nephele.jobgraph.AbstractJobVertex

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.