Use of org.apache.storm.trident.TridentTopology in project storm by apache: class TridentReach, method buildTopology.
public static StormTopology buildTopology() {
    TridentTopology topology = new TridentTopology();
    TridentState urlToTweeters = topology.newStaticState(new StaticSingleKeyMapState.Factory(TWEETERS_DB));
    TridentState tweetersToFollowers = topology.newStaticState(new StaticSingleKeyMapState.Factory(FOLLOWERS_DB));
    topology.newDRPCStream("reach")
            // look up everyone who tweeted the queried URL, then fan out to one tuple per tweeter
            .stateQuery(urlToTweeters, new Fields("args"), new MapGet(), new Fields("tweeters"))
            .each(new Fields("tweeters"), new ExpandList(), new Fields("tweeter"))
            .shuffle()
            // look up each tweeter's followers and fan out to one tuple per follower
            .stateQuery(tweetersToFollowers, new Fields("tweeter"), new MapGet(), new Fields("followers"))
            .each(new Fields("followers"), new ExpandList(), new Fields("follower"))
            // count each distinct follower once, then sum the ones to get the reach
            .groupBy(new Fields("follower"))
            .aggregate(new One(), new Fields("one"))
            .aggregate(new Fields("one"), new Sum(), new Fields("reach"));
    return topology.build();
}
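A DRPC topology like this is not just run, it is queried. The following is a minimal sketch, not part of the storm repo, of submitting the topology and calling the "reach" function through org.apache.storm.utils.DRPCClient; the topology name, DRPC host and port, worker count, and sample URL are all assumptions.
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setNumWorkers(2);
    StormSubmitter.submitTopology("reach-drpc", conf, TridentReach.buildTopology());
    // query the running topology; "reach" matches the function name given to newDRPCStream above
    DRPCClient client = new DRPCClient(Utils.readStormConfig(), "drpc-host", 3772);
    System.out.println("reach = " + client.execute("reach", "https://example.com/page"));
    client.close();
}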
Use of org.apache.storm.trident.TridentTopology in project storm by apache: class TridentTopologySource, method getTopology.
public StormTopology getTopology(Config config) {
    this.spout = new FixedBatchSpout(new Fields("sentence"), 20,
            new Values("one two"), new Values("two three"), new Values("three four"),
            new Values("four five"), new Values("five six"));
    TridentTopology trident = new TridentTopology();
    trident.newStream("wordcount", spout).name("sentence").parallelismHint(1)
            .shuffle()
            .each(new Fields("sentence"), new Split(), new Fields("word")).parallelismHint(1)
            .groupBy(new Fields("word"))
            .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count")).parallelismHint(1);
    return trident.build();
}
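A getTopology(Config) method of this shape is typically invoked by Flux as a topology source, but the StormTopology it returns can just as well be submitted directly. A minimal local-run sketch; the topology name and the 60 second runtime are assumptions.
public static void main(String[] args) throws Exception {
    Config config = new Config();
    LocalCluster cluster = new LocalCluster();
    // topology name and runtime are placeholder values for a quick local test
    cluster.submitTopology("trident-wordcount", config, new TridentTopologySource().getTopology(config));
    Utils.sleep(60_000);
    cluster.shutdown();
}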
Use of org.apache.storm.trident.TridentTopology in project storm by apache: class WordCountTridentRedisClusterMap, method buildTopology.
public static StormTopology buildTopology(String redisHostPort) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1), new Values("trident", 1),
            new Values("needs", 1), new Values("javadoc", 1));
    spout.setCycle(true);
    // parse the comma-separated host:port list into Redis cluster nodes
    Set<InetSocketAddress> nodes = new HashSet<InetSocketAddress>();
    for (String hostPort : redisHostPort.split(",")) {
        String[] hostPortSplit = hostPort.split(":");
        nodes.add(new InetSocketAddress(hostPortSplit[0], Integer.valueOf(hostPortSplit[1])));
    }
    JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes).build();
    RedisDataTypeDescription dataTypeDescription =
            new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
    StateFactory factory = RedisClusterMapState.transactional(clusterConfig, dataTypeDescription);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    TridentState state = stream.groupBy(new Fields("word"))
            .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
    stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
            .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
    return topology.build();
}
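A sketch of a main method that feeds the comma-separated host:port list into this builder and submits the result; the topology name, worker count, and max-spout-pending value are assumptions.
public static void main(String[] args) throws Exception {
    // args[0] is the comma-separated redis-host:port list expected by buildTopology
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    conf.setNumWorkers(3);
    StormSubmitter.submitTopology("redis-cluster-wordcount", conf, buildTopology(args[0]));
}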
Use of org.apache.storm.trident.TridentTopology in project storm by apache: class WordCountTridentRedisMap, method buildTopology.
public static StormTopology buildTopology(String redisHost, Integer redisPort) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1), new Values("trident", 1),
            new Values("needs", 1), new Values("javadoc", 1));
    spout.setCycle(true);
    JedisPoolConfig poolConfig = new JedisPoolConfig.Builder().setHost(redisHost).setPort(redisPort).build();
    RedisDataTypeDescription dataTypeDescription =
            new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
    StateFactory factory = RedisMapState.transactional(poolConfig, dataTypeDescription);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    TridentState state = stream.groupBy(new Fields("word"))
            .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
    stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
            .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
    return topology.build();
}
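For a quick test the single-node variant can be pointed at a local Redis instance. A local-run sketch; Redis on 127.0.0.1:6379, the topology name, and the runtime are assumptions.
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("redis-wordcount", conf, buildTopology("127.0.0.1", 6379));
    // let the cycling spout and state queries run for a minute, then tear down
    Utils.sleep(60_000);
    cluster.shutdown();
}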
Use of org.apache.storm.trident.TridentTopology in project storm by apache: class WordCountTridentMap, method buildTopology.
public static StormTopology buildTopology(String url, String collectionName) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1), new Values("trident", 1),
            new Values("needs", 1), new Values("javadoc", 1));
    spout.setCycle(true);
    // map the "word" and "count" fields to MongoDB documents and query back by "word"
    MongoMapper mapper = new SimpleMongoMapper().withFields("word", "count");
    MongoMapState.Options options = new MongoMapState.Options();
    options.url = url;
    options.collectionName = collectionName;
    options.mapper = mapper;
    QueryFilterCreator filterCreator = new SimpleQueryFilterCreator().withField("word");
    options.queryCreator = filterCreator;
    StateFactory factory = MongoMapState.transactional(options);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    TridentState state = stream.groupBy(new Fields("word"))
            .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
    stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
            .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
    return topology.build();
}
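The Mongo variant differs only in where the builder gets its connection details. A local-run sketch; the MongoDB URL (including the database name), the collection name, the topology name, and the runtime are all assumptions.
public static void main(String[] args) throws Exception {
    Config conf = new Config();
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("mongo-wordcount", conf,
            buildTopology("mongodb://127.0.0.1:27017/test", "wordcount"));
    Utils.sleep(60_000);
    cluster.shutdown();
}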