Package storm.trident

Examples of storm.trident.TridentTopology.build()
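
The snippets below all end by calling TridentTopology.build(), which assembles the fluent stream definition into a StormTopology that can then be submitted to a cluster. For reference, here is a minimal, self-contained sketch of that lifecycle; the spout contents, field names, and in-memory state are illustrative assumptions and not taken from the snippets below.

    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.generated.StormTopology;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentTopology;
    import storm.trident.operation.BaseFunction;
    import storm.trident.operation.TridentCollector;
    import storm.trident.operation.builtin.Count;
    import storm.trident.testing.FixedBatchSpout;
    import storm.trident.testing.MemoryMapState;
    import storm.trident.tuple.TridentTuple;

    public class MinimalBuildExample {

      // Simple tokenizer, in the style of the storm-starter word-count example.
      public static class Split extends BaseFunction {
        @Override
        public void execute(TridentTuple tuple, TridentCollector collector) {
          for (String word : tuple.getString(0).split(" ")) {
            collector.emit(new Values(word));
          }
        }
      }

      public static void main(String[] args) {
        // Illustrative spout: emits a single "sentence" field in small batches.
        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
            new Values("the cow jumped over the moon"),
            new Values("the man went to the store"));
        spout.setCycle(true);

        // Declare the stream, split sentences into words, and keep running
        // word counts in an in-memory map state.
        TridentTopology topology = new TridentTopology();
        topology.newStream("spout1", spout)
            .each(new Fields("sentence"), new Split(), new Fields("word"))
            .groupBy(new Fields("word"))
            .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

        // build() turns the declarative definition into a submittable StormTopology.
        StormTopology stormTopology = topology.build();

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("minimal-trident-build", new Config(), stormTopology);
      }
    }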


        .groupBy(new Fields("shortid", "cf", "cq"))
        .persistentAggregate(state, new Count(), new Fields("count"));

    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("hbase-trident-aggregate", conf, topology.build());

    Utils.sleep(5000);
    cluster.shutdown();
  }
}
View Full Code Here


        new Fields("shortid", "url", "user", "date"), new HBaseValueUpdater());

    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();

    cluster.submitTopology("hbase-trident-updater", conf, topology.build());
    Utils.sleep(10000);
    cluster.shutdown();
  }
}
View Full Code Here
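
The HBaseValueUpdater passed to partitionPersist above is not shown in this fragment. In Trident, such an updater normally extends BaseStateUpdater and writes each batch of tuples into the backing state. A rough sketch follows; the HBaseState type and its write(...) method are hypothetical stand-ins for whatever the surrounding project defines, and only the BaseStateUpdater contract is part of the Trident API.

    import java.util.List;
    import storm.trident.operation.TridentCollector;
    import storm.trident.state.BaseStateUpdater;
    import storm.trident.tuple.TridentTuple;

    // Hypothetical updater: HBaseState and write(...) are assumed placeholders.
    public class HBaseValueUpdaterSketch extends BaseStateUpdater<HBaseState> {
      @Override
      public void updateState(HBaseState state, List<TridentTuple> tuples,
                              TridentCollector collector) {
        for (TridentTuple tuple : tuples) {
          // Each tuple carries the fields named in partitionPersist,
          // e.g. "shortid", "url", "user" and "date".
          state.write(tuple);
        }
      }
    }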

                .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
                .each(new Fields("count"), new FilterNull())
                //.aggregate(new Fields("count"), new Sum(), new Fields("sum"))
                ;
       
        return topology.build();
    }
   
    public static void main(String[] args) throws Exception {
        Config conf = new Config();
        conf.setMaxSpoutPending(20);
View Full Code Here


                .stateQuery(tweetersToFollowers, new Fields("tweeter"), new MapGet(), new Fields("followers"))
                .each(new Fields("followers"), new ExpandList(), new Fields("follower"))
                .groupBy(new Fields("follower"))
                .aggregate(new One(), new Fields("one"))
                .aggregate(new Fields("one"), new Sum(), new Fields("reach"));
        return topology.build();
    }
   
    public static void main(String[] args) throws Exception {
        LocalDRPC drpc = new LocalDRPC();
View Full Code Here
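
The ExpandList function and One aggregator used in this reach computation come from the storm-starter TridentReach example; the fragment does not include them, so the sketches below are reconstructions from memory and should be treated as assumptions.

    import java.util.List;
    import backtype.storm.tuple.Values;
    import storm.trident.operation.BaseFunction;
    import storm.trident.operation.CombinerAggregator;
    import storm.trident.operation.TridentCollector;
    import storm.trident.tuple.TridentTuple;

    // Emits one tuple per element of a list-valued field (e.g. a follower list).
    class ExpandList extends BaseFunction {
      @Override
      public void execute(TridentTuple tuple, TridentCollector collector) {
        List<?> values = (List<?>) tuple.getValue(0);
        if (values != null) {
          for (Object value : values) {
            collector.emit(new Values(value));
          }
        }
      }
    }

    // Collapses each group to a single 1, so the following Sum counts
    // distinct followers rather than raw tuples.
    class One implements CombinerAggregator<Integer> {
      @Override
      public Integer init(TridentTuple tuple) { return 1; }
      @Override
      public Integer combine(Integer val1, Integer val2) { return 1; }
      @Override
      public Integer zero() { return 1; }
    }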

        StateFactory factory = new HdfsStateFactory().withOptions(options);

        TridentState state = stream
                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());

        return topology.build();
    }

    public static void main(String[] args) throws Exception {
        Config conf = new Config();
        conf.setMaxSpoutPending(5);
View Full Code Here

        StateFactory factory = new HdfsStateFactory().withOptions(seqOpts);

        TridentState state = stream
                .partitionPersist(factory, hdfsFields, new HdfsUpdater(), new Fields());

        return topology.build();
    }

    public static void main(String[] args) throws Exception {
        Config conf = new Config();
        conf.setMaxSpoutPending(5);
View Full Code Here
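
In both HDFS fragments above, the interesting part is the options object handed to HdfsStateFactory. Below is a sketch of a typical configuration, loosely following the storm-hdfs documentation; the field names and the filesystem URL are assumptions to adapt, and the second fragment (seqOpts) would use HdfsState.SequenceFileOptions with a SequenceFormat instead of the delimited-text options shown here.

    Fields hdfsFields = new Fields("word", "count");

    // Where and how output files are named under /trident.
    FileNameFormat fileNameFormat = new DefaultFileNameFormat()
            .withPath("/trident")
            .withPrefix("trident")
            .withExtension(".txt");

    // One delimited line per tuple, using the declared fields.
    RecordFormat recordFormat = new DelimitedRecordFormat()
            .withFields(hdfsFields);

    // Roll to a new file once the current one reaches 5 MB.
    FileRotationPolicy rotationPolicy =
            new FileSizeRotationPolicy(5.0f, FileSizeRotationPolicy.Units.MB);

    HdfsState.Options options = new HdfsState.HdfsFileOptions()
            .withFileNameFormat(fileNameFormat)
            .withRecordFormat(recordFormat)
            .withRotationPolicy(rotationPolicy)
            .withFsUrl("hdfs://localhost:8020");

    StateFactory factory = new HdfsStateFactory().withOptions(options);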

        TridentKafkaStateFactory stateFactory = new TridentKafkaStateFactory()
                .withKafkaTopicSelector(new DefaultTopicSelector("test"))
                .withTridentTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper("word", "count"));
        stream.partitionPersist(stateFactory, fields, new TridentKafkaUpdater(), new Fields());

        return topology.build();
    }

    /**
     * To run this topology, ensure you have a Kafka broker running and pass the broker connection string as an argument.
     * Create a topic named "test" from the command line,
View Full Code Here
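
FieldNameBasedTupleToKafkaMapper("word", "count") maps each tuple's "word" field to the Kafka message key and its "count" field to the message value, so the stream feeding partitionPersist must carry exactly those fields. A minimal sketch of such a stream follows; the spout and its contents are illustrative assumptions.

    Fields fields = new Fields("word", "count");
    // Illustrative test spout emitting (word, count) pairs.
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", "1"),
            new Values("trident", "1"),
            new Values("kafka", "1"),
            new Values("topic", "1"));
    spout.setCycle(true);

    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);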

                .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"))
                .each(new Fields("count"), new FilterNull())
                .aggregate(new Fields("count"), new Sum(), new Fields("sum"));

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", config, topology.build());

            Thread.sleep(10000);


            assertEquals("[[5]]", client.execute("words", "cat dog the man")); // 5
View Full Code Here

        new Count(), new Fields("count")).parallelismHint(16);

    topology.newDRPCStream("words", drpc).each(new Fields("args"), new Split(), new Fields("word")).groupBy(new Fields(
        "word")).stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count")).each(new Fields("count"),
        new FilterNull()).aggregate(new Fields("count"), new Sum(), new Fields("sum"));
    return topology.build();
  }

  public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(20);
View Full Code Here
