Examples of ReduceOperator


Examples of org.apache.flink.api.java.record.operators.ReduceOperator

        .input1(iteration.getPartialSolution())
        .input2(adjacencyListInput)
        .name("Join with Edges")
        .build();
   
    ReduceOperator rankAggregation = ReduceOperator.builder(new AggregatingReduce(), LongValue.class, 0)
        .input(join)
        .name("Rank Aggregation")
        .build();
   
    iteration.setNextPartialSolution(rankAggregation);
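The functions handed to these builders (AggregatingReduce here, CountWords and others below) extend the legacy Record API's ReduceFunction and receive all records of one group as an iterator. A minimal sketch of such a function, assuming the old org.apache.flink.api.java.record packages and, purely for illustration, a LongValue aggregate in field 1:

    import java.util.Iterator;

    import org.apache.flink.api.java.record.functions.ReduceFunction;
    import org.apache.flink.types.LongValue;
    import org.apache.flink.types.Record;
    import org.apache.flink.util.Collector;

    // Illustrative group-reduce: sums the LongValue in field 1 over all records
    // of a group (grouped on field 0 by the builder) and emits one result record.
    public class SumAggregateReduce extends ReduceFunction {

      @Override
      public void reduce(Iterator<Record> records, Collector<Record> out) throws Exception {
        Record current = null;
        long sum = 0;
        while (records.hasNext()) {
          current = records.next();
          sum += current.getField(1, LongValue.class).getValue();
        }
        // Reuse the last record of the group as the output record.
        current.setField(1, new LongValue(sum));
        out.collect(current);
      }
    }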

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

    MapOperator projectOutCounts = MapOperator.builder(new ProjectOutCounts())
        .input(edges)
        .name("Project to vertex Ids only")
        .build();

    ReduceOperator buildTriads = ReduceOperator.builder(new BuildTriads(), IntValue.class, 0)
        .input(toLowerDegreeEdge)
        .name("Build Triads")
        .build();

    JoinOperator closeTriads = JoinOperator.builder(new CloseTriads(), IntValue.class, 1, 0)

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

      FileDataSource sourceNode = new FileDataSource(new TextInputFormat(), IN_FILE, "Input Lines");
      MapOperator mapNode = MapOperator.builder(new TokenizeLine())
        .input(sourceNode)
        .name("Tokenize Lines")
        .build();
      ReduceOperator reduceNode = ReduceOperator.builder(new CountWords(), StringValue.class, 0)
        .input(mapNode)
        .name("Count Words")
        .build();
      FileDataSink out = new FileDataSink(new CsvOutputFormat(), OUT_FILE, reduceNode, "Word Counts");
      CsvOutputFormat.configureRecordFormat(out)
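The configureRecordFormat(out) call above is cut off; it is typically completed by declaring the output record layout and then wrapping the sink into a Plan. A sketch of that tail, assuming the word sits in field 0 (StringValue) and the count in field 1 (IntValue), and using LocalExecutor only as one possible way to run the plan:

      // Declare how each Record is written by the CSV sink: one record per line,
      // fields separated by a blank, word in field 0, count in field 1.
      CsvOutputFormat.configureRecordFormat(out)
        .recordDelimiter('\n')
        .fieldDelimiter(' ')
        .field(StringValue.class, 0)
        .field(IntValue.class, 1);

      // Assemble the plan from the sink and execute it, e.g. locally.
      Plan plan = new Plan(out, "WordCount Example");
      LocalExecutor.execute(plan);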

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // create ReduceOperator for selecting the minimum candidate component id per vertex
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // create CrossOperator for distance computation

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

   
    MapOperator mapper = MapOperator.builder(new TokenizeLine())
      .input(source)
      .name("Tokenize Lines")
      .build();
    ReduceOperator reducer = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
      .input(mapper)
      .name("Count Words")
      .build();
    FileDataSink out = new FileDataSink(new CsvOutputFormat(), output, reducer, "Word Counts");
    CsvOutputFormat.configureRecordFormat(out)

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

    MapOperator mapper = MapOperator.builder(new TokenizeLine())
        .input(source)
        .name("Tokenize Lines")
        .build();
    ReduceOperator reducer = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
        .input(mapper)
        .name("Count Words")
        .build();
    HadoopDataSink<Text, IntWritable> out = new HadoopDataSink<Text, IntWritable>(
        new TextOutputFormat<Text, IntWritable>(), new JobConf(), "Hadoop TextOutputFormat",
        reducer, Text.class, IntWritable.class);
    TextOutputFormat.setOutputPath(out.getJobConf(), new Path(output));
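HadoopDataSink and Hadoop's TextOutputFormat are not part of the core record API; they come from Flink's Hadoop compatibility layer plus the Hadoop client libraries, and the output location is set on the sink's JobConf via TextOutputFormat.setOutputPath, as the last line shows.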

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

    edges.setParameter(EdgeInputFormat.ID_DELIMITER_CHAR, delimiter);
   
    MapOperator projectEdge = MapOperator.builder(new ProjectEdge())
      .input(edges).name("Project Edge").build();
   
    ReduceOperator edgeCounter = ReduceOperator.builder(new CountEdges(), IntValue.class, 0)
      .input(projectEdge).name("Count Edges for Vertex").build();
   
    ReduceOperator countJoiner = ReduceOperator.builder(new JoinCountsAndUniquify())
      .keyField(IntValue.class, 0)
      .keyField(IntValue.class, 1)
      .input(edgeCounter)
      .name("Join Counts")
      .build();
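Two key styles appear in this snippet: passing the key class and position directly to builder(...) for a single key field, and chaining keyField(...) calls for a composite key, as JoinCountsAndUniquify does with fields 0 and 1. For a single key field the two forms should be interchangeable; a sketch reusing the names above:

    // Equivalent ways to group on IntValue field 0 (sketch, names from above).
    ReduceOperator viaBuilderArgs = ReduceOperator.builder(new CountEdges(), IntValue.class, 0)
      .input(projectEdge).name("Count Edges for Vertex").build();

    ReduceOperator viaKeyField = ReduceOperator.builder(new CountEdges())
      .keyField(IntValue.class, 0)
      .input(projectEdge).name("Count Edges for Vertex").build();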

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

   
    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(initialInput);
    iteration.setMaximumNumberOfIterations(NUM_ITERATIONS);

    ReduceOperator sumReduce = ReduceOperator.builder(new SumReducer())
        .input(iteration.getPartialSolution())
        .name("Compute sum (Reduce)")
        .build();
   
    iteration.setNextPartialSolution(sumReduce);
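After setNextPartialSolution(...), the BulkIteration operator itself represents the iteration's final result and feeds downstream operators or sinks like any other operator. A sketch of how such a plan is usually closed, with outputPath as a hypothetical variable:

    // The iteration operator is the data source for everything after the loop.
    FileDataSink result = new FileDataSink(new CsvOutputFormat(), outputPath, iteration, "Iteration Result");
    Plan plan = new Plan(result, "Bulk Iteration Example");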

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(initialInput);
    iteration.setMaximumNumberOfIterations(5);
    Assert.assertTrue(iteration.getMaximumNumberOfIterations() > 1);

    ReduceOperator sumReduce = ReduceOperator.builder(new SumReducer())
        .input(iteration.getPartialSolution())
        .name("Compute sum (Reduce)")
        .build();
   
    iteration.setNextPartialSolution(sumReduce);

Examples of org.apache.flink.api.java.record.operators.ReduceOperator

      MapOperator wordsSecondInput = MapOperator.builder(TokenizeLine.class)
        .input(source)
        .name("Words (Second Input)")
        .build();

      @SuppressWarnings("unchecked")
      ReduceOperator counts = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
        .input(wordsFirstInput, wordsSecondInput)
        .name("Word Counts")
        .build();
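Note that .input(wordsFirstInput, wordsSecondInput) passes two operators to one reducer; in this builder API, multiple inputs to a single-input operator are unioned before grouping, so the word counts here cover both map outputs combined.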