Package: storm.trident

Examples of storm.trident.TridentTopology.newStream()


    // In this state we will save the real-time counts per date for each hashtag
    StateFactory mapState = new MemoryMapState.Factory();

    // Real-time part of the system: a Trident topology that groups by hashtag and stores per-date counts
    TridentState hashTagCounts = topology
        .newStream("spout1", spout)
        // note how we carry the date around
        .each(new Fields("tweet", "date"), new Split(), new Fields("word"))
        .each(new Fields("word", "date"), new HashTagFilter(), new Fields("hashtag"))
        .groupBy(new Fields("hashtag"))
View Full Code Here


        new Values("the man went to the store and bought some candy"), new Values("four score and seven years ago"),
        new Values("how many apples can you eat"), new Values("to be or not to be the person"));
    spout.setCycle(true);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("spout1", spout).parallelismHint(16).each(new Fields("sentence"),
        new Split(), new Fields("word")).groupBy(new Fields("word")).persistentAggregate(new MemoryMapState.Factory(),
        new Count(), new Fields("count")).parallelismHint(16);

    topology.newDRPCStream("words", drpc).each(new Fields("args"), new Split(), new Fields("word")).groupBy(new Fields(
        "word")).stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count")).each(new Fields("count"),
View Full Code Here

    try {
      // Build topology
      TridentTopology toppology = new TridentTopology();

      // "Training" stream
      TridentState classifierState = toppology.newStream("reutersData", new ReutersBatchSpout())
      // Transform raw data into a text instance
          .each(new Fields("label", "text"), new TextInstanceCreator<Integer>(), new Fields("instance"))

          // Update text classifier
          .partitionPersist(new MemoryMapState.Factory(), new Fields("instance"), new TextClassifierUpdater<Integer>("newsClassifier", new KLDClassifier(9)));
View Full Code Here

    try {
      // Build topology
      TridentTopology toppology = new TridentTopology();

      // Training stream
      TridentState kmeansState = toppology
        // Emit tuples with an instance containing an integer as label and 3
        // double features named (x0, x1 and x2)
        .newStream("samples", new RandomFeaturesForClusteringSpout())

        // Convert trident tuple to instance
View Full Code Here

      MemoryMapState.Factory perceptronModelStateFactory = new MemoryMapState.Factory();
      TridentState perceptronModel = toppology.newStaticState(perceptronModelStateFactory);
      TridentState perceptronEvaluation = toppology.newStaticState(evaluationStateFactory);

      // Predict
      Stream predictionStream = toppology.newStream("nandsamples", new NANDSpout()) //
          .stateQuery(perceptronModel, new Fields("instance"), new ClassifyQuery<Boolean>("perceptron"), new Fields("prediction"));

      // Update evaluation
      predictionStream //
          .persistentAggregate(evaluationStateFactory, new Fields("instance", "prediction"), new AccuracyAggregator<Boolean>(), new Fields("accuracy"));
View Full Code Here

    try {

      // Build topology
      TridentTopology toppology = new TridentTopology();

      TridentState scaledStreamStatistics = toppology
      // emit tuples with random features
          .newStream("originalStream", new RandomFeaturesSpout(false, 2, 3.0))

          // Transform trident tuple to instance
          .each(new Fields("x0", "x1"), new InstanceCreator(false), new Fields("instance"))
View Full Code Here

    try {
      // Build topology
      TridentTopology toppology = new TridentTopology();

      // Training stream
      TridentState perceptronModel = toppology.newStream("nandsamples", new NANDSpout())
      // update classifier
          .partitionPersist(new MemoryMapState.Factory(), new Fields("instance"), new ClassifierUpdater<Boolean>("perceptron", new PerceptronClassifier()));

      // Classification stream
      toppology.newDRPCStream("predict", localDRPC)
View Full Code Here

                new Values("to be or not to be the person"));
        spout.setCycle(true);
       
        TridentTopology topology = new TridentTopology();       
        TridentState wordCounts =
              topology.newStream("spout1", spout)
                .parallelismHint(16)
                .each(new Fields("sentence"), new Split(), new Fields("word"))
                .groupBy(new Fields("word"))
                .persistentAggregate(new MemoryMapState.Factory(),
                                     new Count(), new Fields("count"))        
View Full Code Here

    // Also, Spouts are "batched".
    TridentTopology topology = new TridentTopology();

    // Each primitive allows us to apply either filters or functions to the stream
    // We always have to select the input fields.
    topology.newStream("filter", spout).each(new Fields("text", "actor"), new PereTweetsFilter())
        .each(new Fields("text", "actor"), new Utils.PrintFilter());

    // Functions describe their output fields, which are always appended to the input fields.
    topology.newStream("function", spout)
        .each(new Fields("text", "actor"), new UppercaseFunction(), new Fields("uppercased_text"))
View Full Code Here

    // We always have to select the input fields.
    topology.newStream("filter", spout).each(new Fields("text", "actor"), new PereTweetsFilter())
        .each(new Fields("text", "actor"), new Utils.PrintFilter());

    // Functions describe their output fields, which are always appended to the input fields.
    topology.newStream("function", spout)
        .each(new Fields("text", "actor"), new UppercaseFunction(), new Fields("uppercased_text"))
        .each(new Fields("text", "uppercased_text"), new Utils.PrintFilter());

    // As you see, Each operations can be chained.
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.