Use of org.apache.storm.tuple.Fields in project storm by apache.
Class AbstractUserTopology, method execute:
public void execute(String[] args) throws Exception {
    if (args.length != 4 && args.length != 5) {
        System.out.println("Usage: " + this.getClass().getSimpleName()
                + " <dataSourceClassName> <dataSource.url> <user> <password> [topology name]");
        System.exit(-1);
    }
    Map<String, Object> map = Maps.newHashMap();
    map.put("dataSourceClassName", args[0]);  // e.g. com.mysql.jdbc.jdbc2.optional.MysqlDataSource
    map.put("dataSource.url", args[1]);       // e.g. jdbc:mysql://localhost/test
    map.put("dataSource.user", args[2]);      // e.g. root
    // the password is always the fourth argument; only the topology name is optional
    map.put("dataSource.password", args[3]);
    Config config = new Config();
    config.put(JDBC_CONF, map);
    ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
    connectionProvider.prepare();
    int queryTimeoutSecs = 60;
    JdbcClient jdbcClient = new JdbcClient(connectionProvider, queryTimeoutSecs);
    for (String sql : setupSqls) {
        jdbcClient.executeSql(sql);
    }
    this.userSpout = new UserSpout();
    this.jdbcMapper = new SimpleJdbcMapper(TABLE_NAME, connectionProvider);
    connectionProvider.cleanup();
    Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
    List<Column> queryParamColumns = Lists.newArrayList(new Column("user_id", Types.INTEGER));
    this.jdbcLookupMapper = new SimpleJdbcLookupMapper(outputFields, queryParamColumns);
    this.connectionProvider = new HikariCPConnectionProvider(map);
    if (args.length == 4) {
        try (LocalCluster cluster = new LocalCluster();
             LocalTopology topo = cluster.submitTopology("test", config, getTopology())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    } else {
        StormSubmitter.submitTopology(args[4], config, getTopology());
    }
}
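UserSpout is referenced above but not shown. The lookup fields only line up if the spout declares user_id, user_name, and create_date in that order; a minimal sketch of what it might look like (the sample row emitted here is hypothetical, the real class ships with the storm-jdbc examples):

import java.util.Date;
import java.util.Map;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

public class UserSpout extends BaseRichSpout {
    private SpoutOutputCollector collector;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void nextTuple() {
        // hypothetical sample row; values must match the declared field order
        collector.emit(new Values(1, "peter", new Date()));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("user_id", "user_name", "create_date"));
    }
}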
Use of org.apache.storm.tuple.Fields in project storm by apache.
Class UserPersistanceTridentTopology, method getTopology:
@Override
public StormTopology getTopology() {
    TridentTopology topology = new TridentTopology();
    JdbcState.Options options = new JdbcState.Options()
            .withConnectionProvider(connectionProvider)
            .withMapper(this.jdbcMapper)
            .withJdbcLookupMapper(new SimpleJdbcLookupMapper(new Fields("dept_name"),
                    Lists.newArrayList(new Column("user_id", Types.INTEGER))))
            .withTableName(TABLE_NAME)
            .withSelectQuery(SELECT_QUERY);
    JdbcStateFactory jdbcStateFactory = new JdbcStateFactory(options);
    Stream stream = topology.newStream("userSpout", new UserSpout());
    TridentState state = topology.newStaticState(jdbcStateFactory);
    stream = stream.stateQuery(state, new Fields("user_id", "user_name", "create_date"),
            new JdbcQuery(), new Fields("dept_name"));
    stream.partitionPersist(jdbcStateFactory,
            new Fields("user_id", "user_name", "dept_name", "create_date"),
            new JdbcUpdater(), new Fields());
    return topology.build();
}
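TABLE_NAME and SELECT_QUERY are constants inherited from AbstractUserTopology and do not appear in this listing. A hedged guess at their shape, just to make the lookup readable; the exact SQL is an assumption, not taken from the source:

// Hypothetical values; the real constants live in AbstractUserTopology.
// The single "?" is what the Column("user_id", Types.INTEGER) parameter binds to.
protected static final String TABLE_NAME = "user_details";
protected static final String SELECT_QUERY =
        "select dept_name from user_details where user_id = ?";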
Use of org.apache.storm.tuple.Fields in project storm by apache.
Class WordCountTridentRedisClusterMap, method buildTopology:
public static StormTopology buildTopology(String redisHostPort) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1));
    spout.setCycle(true);
    Set<InetSocketAddress> nodes = new HashSet<>();
    for (String hostPort : redisHostPort.split(",")) {
        String[] hostPortPair = hostPort.split(":");
        nodes.add(new InetSocketAddress(hostPortPair[0], Integer.valueOf(hostPortPair[1])));
    }
    JedisClusterConfig clusterConfig = new JedisClusterConfig.Builder().setNodes(nodes).build();
    RedisDataTypeDescription dataTypeDescription =
            new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
    StateFactory factory = RedisClusterMapState.transactional(clusterConfig, dataTypeDescription);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    TridentState state = stream.groupBy(new Fields("word"))
            .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
    stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
            .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
    return topology.build();
}
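PrintFunction is also not part of this listing. Assuming it is a plain Trident BaseFunction that only logs its input, a minimal sketch; the empty output Fields() in the each(...) call above means it emits nothing downstream:

import org.apache.storm.trident.operation.BaseFunction;
import org.apache.storm.trident.operation.TridentCollector;
import org.apache.storm.trident.tuple.TridentTuple;

public class PrintFunction extends BaseFunction {
    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        // log the joined (word, sum) pair; nothing is emitted
        System.out.println(tuple);
    }
}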
Use of org.apache.storm.tuple.Fields in project storm by apache.
Class WordCountTridentRedisMap, method buildTopology:
public static StormTopology buildTopology(String redisHost, Integer redisPort) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4,
            new Values("storm", 1),
            new Values("trident", 1),
            new Values("needs", 1),
            new Values("javadoc", 1));
    spout.setCycle(true);
    JedisPoolConfig poolConfig = new JedisPoolConfig.Builder().setHost(redisHost).setPort(redisPort).build();
    RedisDataTypeDescription dataTypeDescription =
            new RedisDataTypeDescription(RedisDataTypeDescription.RedisDataType.HASH, "test");
    StateFactory factory = RedisMapState.transactional(poolConfig, dataTypeDescription);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    TridentState state = stream.groupBy(new Fields("word"))
            .persistentAggregate(factory, new Fields("count"), new Sum(), new Fields("sum"));
    stream.stateQuery(state, new Fields("word"), new MapGet(), new Fields("sum"))
            .each(new Fields("word", "sum"), new PrintFunction(), new Fields());
    return topology.build();
}
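Neither Redis example shows how buildTopology is submitted. A hedged sketch of a local run inside WordCountTridentRedisMap, following the LocalCluster try-with-resources pattern used elsewhere on this page; the topology name and the 60-second run window are assumptions:

public static void main(String[] args) throws Exception {
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    if (args.length == 2) {
        try (LocalCluster cluster = new LocalCluster();
             LocalTopology topo = cluster.submitTopology("test_wordcount",
                     conf, buildTopology(args[0], Integer.valueOf(args[1])))) {
            Thread.sleep(60 * 1000);
        }
        System.exit(0);
    } else {
        System.out.println("Usage: WordCountTridentRedisMap <redis host> <redis port>");
    }
}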
Use of org.apache.storm.tuple.Fields in project storm by apache.
Class InsertWordCount, method main:
public static void main(String[] args) throws Exception {
    Config config = new Config();
    String url = TEST_MONGODB_URL;
    String collectionName = TEST_MONGODB_COLLECTION_NAME;
    if (args.length >= 2) {
        url = args[0];
        collectionName = args[1];
    }
    WordSpout spout = new WordSpout();
    WordCounter bolt = new WordCounter();
    MongoMapper mapper = new SimpleMongoMapper().withFields("word", "count");
    MongoInsertBolt insertBolt = new MongoInsertBolt(url, collectionName, mapper);
    // wordSpout ==> countBolt ==> MongoInsertBolt
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(WORD_SPOUT, spout, 1);
    builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
    builder.setBolt(INSERT_BOLT, insertBolt, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
    if (args.length == 2) {
        try (LocalCluster cluster = new LocalCluster();
             LocalTopology topo = cluster.submitTopology("test", config, builder.createTopology())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    } else if (args.length == 3) {
        StormSubmitter.submitTopology(args[2], config, builder.createTopology());
    } else {
        System.out.println("Usage: InsertWordCount <mongodb url> <mongodb collection> [topology name]");
    }
}
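WordCounter is referenced but not shown. A minimal sketch, assuming it keeps an in-memory tally and emits (word, count) pairs; the fieldsGrouping on "word" above routes every occurrence of a word to the same counter task, which is what keeps each task's map consistent:

import java.util.HashMap;
import java.util.Map;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

public class WordCounter extends BaseBasicBolt {
    private final Map<String, Integer> counts = new HashMap<>();

    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        // assumes the upstream spout emits a single "word" field
        String word = input.getStringByField("word");
        int count = counts.merge(word, 1, Integer::sum);
        collector.emit(new Values(word, count));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}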