Use of org.apache.storm.trident.TridentTopology in the Apache Storm project.
From the class WordCountTridentRedisCluster, method buildTopology.
/**
 * Builds a Trident topology that persists word counts to a Redis Cluster and
 * queries them back, printing each looked-up column name/value pair.
 *
 * @param redisHostPort comma-separated list of {@code host:port} Redis Cluster nodes
 * @return the built {@link StormTopology}
 */
public static StormTopology buildTopology(String redisHostPort) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1));
    spout.setCycle(true);

    // Parse "host:port,host:port,..." into the cluster node set.
    Set<InetSocketAddress> nodes = new HashSet<>();
    for (String hostPort : redisHostPort.split(",")) {
        String[] hostPortSplit = hostPort.split(":");
        nodes.add(new InetSocketAddress(hostPortSplit[0], Integer.parseInt(hostPortSplit[1])));
    }
    JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes).build();

    RedisStoreMapper storeMapper = new WordCountStoreMapper();
    RedisLookupMapper lookupMapper = new WordCountLookupMapper();
    RedisClusterState.Factory factory = new RedisClusterState.Factory(clusterConfig);

    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    // withExpire takes an interval in SECONDS: 86400 == 24 hours.
    // (The previous value 86400000 was 24h in milliseconds, i.e. ~1000 days here.)
    stream.partitionPersist(factory, fields,
            new RedisClusterStateUpdater(storeMapper).withExpire(86400),
            new Fields());

    TridentState state = topology.newStaticState(factory);
    stream = stream.stateQuery(state, new Fields("word"),
            new RedisClusterStateQuerier(lookupMapper),
            new Fields("columnName", "columnValue"));
    stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
    return topology.build();
}
Use of org.apache.storm.trident.TridentTopology in the Apache Storm project.
From the class SolrJsonTridentTopology, method getTopology.
/**
 * Builds a Trident topology that reads JSON documents from a
 * {@link SolrJsonSpout} and persists each batch to Solr.
 *
 * @return the built {@link StormTopology}
 * @throws IOException if the Solr configuration cannot be loaded
 */
@Override
protected StormTopology getTopology() throws IOException {
    final SolrJsonSpout jsonSpout = new SolrJsonSpout();
    final StateFactory stateFactory = new SolrStateFactory(getSolrConfig(), getSolrMapper());

    final TridentTopology trident = new TridentTopology();
    trident.newStream("SolrJsonSpout", jsonSpout)
           .partitionPersist(stateFactory, jsonSpout.getOutputFields(), new SolrUpdater(), new Fields());
    return trident.build();
}
Use of org.apache.storm.trident.TridentTopology in the Apache Storm project.
From the class WordCountTrident (RocketMQ example), method buildTopology.
/**
 * Builds a Trident topology that persists word/count tuples to RocketMQ.
 *
 * @param nameserverAddr RocketMQ name-server address
 * @param topic          destination topic for the produced messages
 * @return the built {@link StormTopology}
 */
public static StormTopology buildTopology(String nameserverAddr, String topic) {
    final Fields outputFields = new Fields("word", "count");
    final FixedBatchSpout wordSpout = new FixedBatchSpout(outputFields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1));
    wordSpout.setCycle(true);

    // Configure how tuples map onto RocketMQ messages and where they are sent.
    final Properties props = new Properties();
    props.setProperty(RocketMqConfig.NAME_SERVER_ADDR, nameserverAddr);
    final RocketMqState.Options stateOptions = new RocketMqState.Options()
            .withMapper(new FieldNameBasedTupleToMessageMapper("word", "count"))
            .withSelector(new DefaultTopicSelector(topic))
            .withProperties(props);

    final TridentTopology trident = new TridentTopology();
    trident.newStream("spout1", wordSpout)
           .partitionPersist(new RocketMqStateFactory(stateOptions), outputFields,
                   new RocketMqStateUpdater(), new Fields());
    return trident.build();
}
Use of org.apache.storm.trident.TridentTopology in the Apache Storm project.
From the class SolrFieldsTridentTopology, method getTopology.
/**
 * Builds a Trident topology that reads field tuples from a
 * {@link SolrFieldsSpout} and persists each batch to Solr.
 *
 * @return the built {@link StormTopology}
 * @throws IOException if the Solr configuration cannot be loaded
 */
@Override
protected StormTopology getTopology() throws IOException {
    final SolrConfig config = getSolrConfig();
    final StateFactory stateFactory = new SolrStateFactory(config, getSolrMapper(config));
    final SolrFieldsSpout fieldsSpout = new SolrFieldsSpout();

    final TridentTopology trident = new TridentTopology();
    trident.newStream("SolrFieldsSpout", fieldsSpout)
           .partitionPersist(stateFactory, fieldsSpout.getOutputFields(), new SolrUpdater(), new Fields());
    return trident.build();
}
Use of org.apache.storm.trident.TridentTopology in the Apache Storm project.
From the class MapStateTest, method wordsTest.
/**
 * Runs the word-count topology against the supplied state factory, waits for
 * the counts to be persisted (polling the backing store), then verifies DRPC
 * lookups against known word frequencies.
 *
 * @param factory the map-state factory under test
 * @throws Exception if topology submission fails or the wait is interrupted
 */
public void wordsTest(StateFactory factory) throws Exception {
    FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
            new Values("the cow jumped over the moon"),
            new Values("the man went to the store and bought some candy"),
            new Values("four score and seven years ago"),
            new Values("how many apples can you eat"));
    spout.setCycle(false);

    TridentTopology topology = new TridentTopology();
    TridentState wordCounts = topology.newStream("spout1", spout)
            .each(new Fields("sentence"), new Split(), new Fields("word"))
            .groupBy(new Fields("word"))
            .persistentAggregate(factory, new Count(), new Fields("state"))
            .parallelismHint(1);

    // try-with-resources guarantees the local cluster and DRPC client are
    // shut down even when an assertion fails or the poll loop throws
    // (the original leaked both in those cases).
    try (LocalCluster cluster = new LocalCluster();
         LocalDRPC client = new LocalDRPC(cluster.getMetricRegistry())) {
        topology.newDRPCStream("words", client)
                .each(new Fields("args"), new Split(), new Fields("word"))
                .groupBy(new Fields("word"))
                .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("state"))
                .each(new Fields("state"), new FilterNull())
                .aggregate(new Fields("state"), new Sum(), new Fields("sum"));

        logger.info("Submitting topology.");
        // Diamond avoids the raw-type HashMap of the original.
        cluster.submitTopology("test", new HashMap<>(), topology.build());

        logger.info("Waiting for something to happen.");
        // Poll the backing table until all 24 distinct words have been stored.
        int count;
        do {
            Thread.sleep(2000);
            count = session.execute(QueryBuilder.select().all().from("words_ks", "words_table"))
                    .getAvailableWithoutFetching();
            logger.info("Found {} records", count);
        } while (count < 24);

        logger.info("Starting queries.");
        // 5
        assertEquals("[[5]]", client.execute("words", "cat dog the man"));
        // 0
        assertEquals("[[0]]", client.execute("words", "cat"));
        // 0
        assertEquals("[[0]]", client.execute("words", "dog"));
        // 4
        assertEquals("[[4]]", client.execute("words", "the"));
        // 1
        assertEquals("[[1]]", client.execute("words", "man"));
    }
}
Aggregations