Package org.apache.flink.api.java.record.operators

Examples of org.apache.flink.api.java.record.operators.DeltaIteration$SolutionSetPlaceHolder
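
org.apache.flink.api.java.record.operators.DeltaIteration models a workset (delta) iteration in Flink's deprecated Java record API. Inside the loop, getSolutionSet() and getWorkset() hand out placeholder operators (the nested SolutionSetPlaceHolder and WorksetPlaceHolder classes) that stand in for the evolving solution set and workset; the loop is closed by wiring operators into setSolutionSetDelta(...) and setNextWorkset(...). The following is a minimal wiring sketch, not taken from the usages below: it assumes the old record API as shown in the snippets, a hypothetical pass-through join stub named MyJoin, and a CSV layout of two LongValue fields with the key at position 0.

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.java.record.functions.JoinFunction;
import org.apache.flink.api.java.record.io.CsvInputFormat;
import org.apache.flink.api.java.record.io.CsvOutputFormat;
import org.apache.flink.api.java.record.operators.DeltaIteration;
import org.apache.flink.api.java.record.operators.FileDataSink;
import org.apache.flink.api.java.record.operators.FileDataSource;
import org.apache.flink.api.java.record.operators.JoinOperator;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

public class DeltaIterationSkeleton {

  // hypothetical stub: forwards the workset record unchanged
  public static class MyJoin extends JoinFunction {
    @Override
    public void join(Record worksetRecord, Record solutionSetRecord, Collector<Record> out) {
      out.collect(worksetRecord);
    }
  }

  public static Plan buildPlan(String stateInput, String updatesInput, String output, int maxIterations) {
    // initial solution set (the evolving state) and initial workset (the pending updates)
    FileDataSource initialState = new FileDataSource(
        new CsvInputFormat(' ', LongValue.class, LongValue.class), stateInput, "Initial Solution Set");
    FileDataSource initialUpdates = new FileDataSource(
        new CsvInputFormat(' ', LongValue.class, LongValue.class), updatesInput, "Initial Workset");

    // the iteration is keyed on field 0 of the solution set
    DeltaIteration iteration = new DeltaIteration(0, "Delta Iteration Skeleton");
    iteration.setInitialSolutionSet(initialState);
    iteration.setInitialWorkset(initialUpdates);
    iteration.setMaximumNumberOfIterations(maxIterations);

    // getWorkset() / getSolutionSet() return the WorksetPlaceHolder / SolutionSetPlaceHolder
    // operators that represent the current state inside the loop
    JoinOperator step = JoinOperator.builder(MyJoin.class, LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(iteration.getSolutionSet())
        .name("One Iteration Step")
        .build();

    // close the loop: what feeds the next superstep, and what is merged into the solution set
    iteration.setNextWorkset(step);
    iteration.setSolutionSetDelta(step);

    FileDataSink result = new FileDataSink(new CsvOutputFormat(), output, iteration, "Result");
    return new Plan(result, "Delta Iteration Skeleton");
  }
}

Each of the usages below instantiates this same pattern with algorithm-specific stubs.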


    if (vertexKey == null || vertexValue == null || messageType == null || edgeValue == null) {
      throw new RuntimeException("The vertex key, vertex value, message, and edge value types must not be null.");
    }
 
    // instantiate the data flow
    this.iteration = new DeltaIteration(0, name);
   
    this.messager = CoGroupOperator.builder(MessagingDriver.class, vertexKey, 0, 0)
      .input2(iteration.getWorkset())
      .name("Message Sender")
      .build();
View Full Code Here


    FileDataSource initialDeltaSet = new FileDataSource(new CsvInputFormat(' ', LongValue.class, DoubleValue.class), deltasInput, "Initial DeltaSet");
       
    // create DataSourceContract for the edges
    FileDataSource dependencySet = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class, LongValue.class), dependencySetInput, "Dependency Set");
   
    DeltaIteration iteration = new DeltaIteration(0, "Delta PageRank");
    iteration.setInitialSolutionSet(initialSolutionSet);
    iteration.setInitialWorkset(initialDeltaSet);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    JoinOperator dependenciesMatch = JoinOperator.builder(PRDependenciesComputationMatchDelta.class,
        LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(dependencySet)
        .name("calculate dependencies")
        .build();
   
    ReduceOperator updateRanks = ReduceOperator.builder(UpdateRankReduceDelta.class, LongValue.class, 0)
        .input(dependenciesMatch)
        .name("update ranks")
        .build();
   
    JoinOperator oldRankComparison = JoinOperator.builder(RankComparisonMatch.class, LongValue.class, 0, 0)
        .input1(updateRanks)
        .input2(iteration.getSolutionSet())
        .name("comparison with old ranks")
        .build();

    iteration.setNextWorkset(updateRanks);
    iteration.setSolutionSetDelta(oldRankComparison);
   
    // create DataSinkContract for writing the final ranks
    FileDataSink result = new FileDataSink(CsvOutputFormat.class, output, iteration, "Final Ranks");
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
View Full Code Here
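
The sink configuration above is cut off after the record delimiter. Below is a hedged completion of that chain; the field layout (page id as LongValue at position 0, rank as DoubleValue at position 1) and the space field delimiter are assumptions based on the input format used above, not the verified original.

    // hedged completion of the truncated CsvOutputFormat configuration above;
    // the field layout and the field delimiter are assumptions
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
      .fieldDelimiter(' ')
      .field(LongValue.class, 0)
      .field(DoubleValue.class, 1);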

    // create DataSourceContract for the vertices
    FileDataSource initialVertices = new FileDataSource(new CsvInputFormat(' ', LongValue.class), verticesInput, "Vertices");
   
    MapOperator verticesWithId = MapOperator.builder(DuplicateLongMap.class).input(initialVertices).name("Assign Vertex Ids").build();
   
    DeltaIteration iteration = new DeltaIteration(0, "Connected Components Iteration");
    iteration.setInitialSolutionSet(verticesWithId);
    iteration.setInitialWorkset(verticesWithId);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    // create DataSourceContract for the edges
    FileDataSource edges = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class), edgeInput, "Edges");

    // join the workset (changed vertices) with the edges to propagate candidate component ids to neighbors
    JoinOperator joinWithNeighbors = JoinOperator.builder(new NeighborWithComponentIDJoin(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // find for each vertex the smallest candidate component id
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // join the minimum candidates with the solution set and update the component id if the candidate is smaller
    JoinOperator updateComponentId = JoinOperator.builder(new UpdateComponentIdMatchMirrored(), LongValue.class, 0, 0)
        .input1(iteration.getSolutionSet())
        .input2(minCandidateId)
        .name("Update Component Id")
        .build();
   
    iteration.setNextWorkset(updateComponentId);
    iteration.setSolutionSetDelta(updateComponentId);

    // create DataSinkContract for writing the final component ids
    FileDataSink result = new FileDataSink(new CsvOutputFormat(), output, iteration, "Result");
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
View Full Code Here

    // data source for initial vertices
    FileDataSource initialVertices = new FileDataSource(new CsvInputFormat(' ', LongValue.class), verticesInput, "Vertices");
   
    MapOperator verticesWithId = MapOperator.builder(DuplicateLongMap.class).input(initialVertices).name("Assign Vertex Ids").build();
   
    DeltaIteration iteration = new DeltaIteration(0, "Connected Components Iteration");
    iteration.setInitialSolutionSet(verticesWithId);
    iteration.setInitialWorkset(verticesWithId);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    // create DataSourceContract for the edges
    FileDataSource edges = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class), edgeInput, "Edges");

    // join the workset (changed vertices) with the edges to propagate candidate component ids to neighbors
    JoinOperator joinWithNeighbors = JoinOperator.builder(new NeighborWithComponentIDJoin(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    CoGroupOperator minAndUpdate = CoGroupOperator.builder(new MinIdAndUpdate(), LongValue.class, 0, 0)
        .input1(joinWithNeighbors)
        .input2(iteration.getSolutionSet())
        .name("Min Id and Update")
        .build();
   
    iteration.setNextWorkset(minAndUpdate);
    iteration.setSolutionSetDelta(minAndUpdate);

    // create DataSinkContract for writing the final component ids
    FileDataSink result = new FileDataSink(new CsvOutputFormat(), output, iteration, "Result");
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
View Full Code Here
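
The two-operator variant above folds the "find the minimum candidate" and "update if smaller" steps into a single MinIdAndUpdate CoGroup stub, whereas the other variants use a separate reduce. The sketch below is a hypothetical minimum-id reduce for the deprecated record API, not the actual Flink example class; it assumes records laid out as <vertex id: LongValue, candidate component id: LongValue> and the record-API ReduceFunction signature.

import java.util.Iterator;

import org.apache.flink.api.java.record.functions.ReduceFunction;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

// hypothetical sketch of a minimum-candidate-id reduce stub (not the Flink example class)
public class MinimumCandidateIdReduce extends ReduceFunction {

  @Override
  public void reduce(Iterator<Record> records, Collector<Record> out) {
    // all records in the group share the vertex id in field 0; track the smallest candidate in field 1
    Record record = records.next();
    long min = record.getField(1, LongValue.class).getValue();

    while (records.hasNext()) {
      record = records.next();
      long candidate = record.getField(1, LongValue.class).getValue();
      if (candidate < min) {
        min = candidate;
      }
    }

    // emit one record per vertex carrying the minimum candidate component id
    record.setField(1, new LongValue(min));
    out.collect(record);
  }
}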


   
    MapOperator verticesWithId = MapOperator.builder(DuplicateLongMap.class).input(initialVertices).name("Assign Vertex Ids").build();
   
    // the loop takes the vertices as the solution set and changed vertices as the workset
    // initially, all vertices are changed
    DeltaIteration iteration = new DeltaIteration(0, "Connected Components Iteration");
    iteration.setInitialSolutionSet(verticesWithId);
    iteration.setInitialWorkset(verticesWithId);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    // data source for the edges
    FileDataSource edges = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class), edgeInput, "Edges");

    // join workset (changed vertices) with the edges to propagate changes to neighbors
    JoinOperator joinWithNeighbors = JoinOperator.builder(new NeighborWithComponentIDJoin(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // find for each neighbor the smallest of all candidates
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // join candidates with the solution set and update if the candidate component-id is smaller
    JoinOperator updateComponentId = JoinOperator.builder(new UpdateComponentIdMatchNonPreserving(), LongValue.class, 0, 0)
        .input1(minCandidateId)
        .input2(iteration.getSolutionSet())
        .name("Update Component Id")
        .build();
   
    if (extraMap) {
      MapOperator mapper = MapOperator.builder(IdentityMap.class).input(updateComponentId).name("idmap").build();
      iteration.setSolutionSetDelta(mapper);
    } else {
      iteration.setSolutionSetDelta(updateComponentId);
    }
   
    iteration.setNextWorkset(updateComponentId);

    // sink is the iteration result
    FileDataSink result = new FileDataSink(new CsvOutputFormat("\n", " ", LongValue.class, LongValue.class), output, iteration, "Result");

    // return the PACT plan
View Full Code Here
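
The snippet above stops right at the "// return the PACT plan" comment. Assuming the usual record-API closing step, it would look roughly like the lines below; the job name and the numSubTasks parallelism parameter are hypothetical.

    // hypothetical closing step; "numSubTasks" is an assumed method parameter
    Plan plan = new Plan(result, "Connected Components (Delta Iteration)");
    plan.setDefaultParallelism(numSubTasks);
    return plan;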

   
    MapOperator verticesWithId = MapOperator.builder(DuplicateLongMap.class).input(initialVertices).name("Assign Vertex Ids").build();
   
    // the loop takes the vertices as the solution set and changed vertices as the workset
    // initially, all vertices are changed
    DeltaIteration iteration = new DeltaIteration(0, "Connected Components Iteration");
    iteration.setInitialSolutionSet(verticesWithId);
    iteration.setInitialWorkset(verticesWithId);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    // data source for the edges
    FileDataSource edges = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class), edgeInput, "Edges");

    // join workset (changed vertices) with the edges to propagate changes to neighbors
    JoinOperator joinWithNeighbors = JoinOperator.builder(new NeighborWithComponentIDJoin(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // find for each neighbor the smallest of all candidates
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // join candidates with the solution set and update if the candidate component-id is smaller
    JoinOperator updateComponentId = JoinOperator.builder(new UpdateComponentIdMatch(), LongValue.class, 0, 0)
        .input1(minCandidateId)
        .input2(iteration.getSolutionSet())
        .name("Update Component Id")
        .build();
   
    iteration.setNextWorkset(updateComponentId);
    iteration.setSolutionSetDelta(updateComponentId);

    // sink is the iteration result
    FileDataSink result = new FileDataSink(new CsvOutputFormat(), output, iteration, "Result");
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
View Full Code Here

    // data source for initial vertices
    FileDataSource initialVertices = new FileDataSource(new CsvInputFormat(' ', LongValue.class), verticesInput, "Vertices");
   
    MapOperator verticesWithId = MapOperator.builder(DuplicateLongMap.class).input(initialVertices).name("Assign Vertex Ids").build();
   
    DeltaIteration iteration = new DeltaIteration(0, "Connected Components Iteration");
    iteration.setInitialSolutionSet(verticesWithId);
    iteration.setInitialWorkset(verticesWithId);
    iteration.setMaximumNumberOfIterations(maxIterations);
   
    // create DataSourceContract for the edges
    FileDataSource edges = new FileDataSource(new CsvInputFormat(' ', LongValue.class, LongValue.class), edgeInput, "Edges");

    // join the workset (changed vertices) with the edges to propagate candidate component ids to neighbors
    JoinOperator joinWithNeighbors = JoinOperator.builder(new NeighborWithComponentIDJoin(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // find for each vertex the smallest candidate component id
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // join the minimum candidates with the solution set and update the component id if the candidate is smaller
    JoinOperator updateComponentId = JoinOperator.builder(new UpdateComponentIdMatchMirrored(), LongValue.class, 0, 0)
        .input1(iteration.getSolutionSet())
        .input2(minCandidateId)
        .name("Update Component Id")
        .build();
   
    iteration.setNextWorkset(updateComponentId);
    iteration.setSolutionSetDelta(updateComponentId);

    // create DataSinkContract for writing the final component ids
    FileDataSink result = new FileDataSink(new CsvOutputFormat(), output, iteration, "Result");
    CsvOutputFormat.configureRecordFormat(result)
      .recordDelimiter('\n')
View Full Code Here
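
In the mirrored variant above, the solution set is the first join input and the minimum candidates are the second; the stub is expected to emit a record only when the candidate component id improves on the current one, so the same output can serve as both the solution-set delta and the next workset. The sketch below is a hypothetical version of such a stub, not the actual UpdateComponentIdMatchMirrored class; it assumes the record-API JoinFunction signature and records laid out as <vertex id, component id>.

import org.apache.flink.api.java.record.functions.JoinFunction;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.Record;
import org.apache.flink.util.Collector;

// hypothetical sketch of an "update if smaller" join stub (not the Flink example class)
public class UpdateIfSmallerJoin extends JoinFunction {

  @Override
  public void join(Record currentState, Record candidate, Collector<Record> out) {
    long currentId = currentState.getField(1, LongValue.class).getValue();
    long candidateId = candidate.getField(1, LongValue.class).getValue();

    // emit only genuine improvements: unchanged vertices produce neither a delta nor a workset record
    if (candidateId < currentId) {
      out.collect(candidate);
    }
  }
}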

    FileDataSource solutionSetInput = new FileDataSource(new DummyInputFormat(), IN_FILE, "Solution Set");
    FileDataSource worksetInput = new FileDataSource(new DummyInputFormat(), IN_FILE, "Workset");
   
    FileDataSource invariantInput = new FileDataSource(new DummyInputFormat(), IN_FILE, "Invariant Input");
   
    DeltaIteration iteration = new DeltaIteration(0, ITERATION_NAME);
    iteration.setInitialSolutionSet(solutionSetInput);
    iteration.setInitialWorkset(worksetInput);
    iteration.setMaximumNumberOfIterations(100);

    JoinOperator joinWithInvariant = JoinOperator.builder(new DummyMatchStub(), LongValue.class, 0, 0)
        .input1(iteration.getWorkset())
        .input2(invariantInput)
        .name(JOIN_WITH_INVARIANT_NAME)
        .build();

    JoinOperator joinWithSolutionSet = JoinOperator.builder(
        joinPreservesSolutionSet ? new DummyMatchStub() : new DummyNonPreservingMatchStub(), LongValue.class, 0, 0)
        .input1(iteration.getSolutionSet())
        .input2(joinWithInvariant)
        .name(JOIN_WITH_SOLUTION_SET)
        .build();
   
    ReduceOperator nextWorkset = ReduceOperator.builder(new IdentityReduce(), LongValue.class, 0)
        .input(joinWithSolutionSet)
        .name(NEXT_WORKSET_REDUCER_NAME)
        .build();
   
    if (mapBeforeSolutionDelta) {
      MapOperator mapper = MapOperator.builder(new IdentityMap())
        .input(joinWithSolutionSet)
        .name(SOLUTION_DELTA_MAPPER_NAME)
        .build();
      iteration.setSolutionSetDelta(mapper);
    } else {
      iteration.setSolutionSetDelta(joinWithSolutionSet);
    }
   
    iteration.setNextWorkset(nextWorkset);

    FileDataSink sink = new FileDataSink(new DummyOutputFormat(), OUT_FILE, iteration, "Sink");
   
    Plan plan = new Plan(sink);
    plan.setDefaultParallelism(DEFAULT_PARALLELISM);
View Full Code Here

    FileDataSource sourceC = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 3");

    FileDataSink sink1 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceA, "Sink 1");
    FileDataSink sink2 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceC, "Sink 2");

    DeltaIteration iteration = new DeltaIteration(0, "Loop");
    iteration.setInitialSolutionSet(sourceA);
    iteration.setInitialWorkset(sourceB);
    iteration.setMaximumNumberOfIterations(10);

    CrossOperator nextWorkset = CrossOperator.builder(DummyCrossStub.class).name("Next workset").
        input1(iteration.getWorkset()).
        input2(sourceC).
        build();

    JoinOperator solutionSetDelta = JoinOperator.builder(DummyMatchStub.class, LongValue.class,0,0).
        name("Next solution set.").
        input1(nextWorkset).
        input2(iteration.getSolutionSet()).
        build();

    iteration.setNextWorkset(nextWorkset);
    iteration.setSolutionSetDelta(solutionSetDelta);

    FileDataSink sink3 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Sink 3");

    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink1);
View Full Code Here
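
The collection of sinks above is truncated after the first add. A hedged continuation would register the remaining sinks and build a multi-sink plan; the assumption here is that org.apache.flink.api.common.Plan offers a constructor taking a collection of data sinks, as suggested by the single-sink usage in the earlier test snippet.

    sinks.add(sink2);
    sinks.add(sink3);

    // assumption: Plan accepts a collection of sinks and an optional job name
    Plan plan = new Plan(sinks, "Iteration with multiple sinks");
    plan.setDefaultParallelism(DEFAULT_PARALLELISM);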
