
Example 1 with LocalCluster

Use of org.apache.storm.LocalCluster in project storm by apache.

From the class HiveTopology, method main.

public static void main(String[] args) throws Exception {
    String metaStoreURI = args[0];
    String dbName = args[1];
    String tblName = args[2];
    String[] colNames = { "id", "name", "phone", "street", "city", "state" };
    Config config = new Config();
    config.setNumWorkers(1);
    UserDataSpout spout = new UserDataSpout();
    DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper().withColumnFields(new Fields(colNames));
    HiveOptions hiveOptions;
    if (args.length == 6) {
        hiveOptions = new HiveOptions(metaStoreURI, dbName, tblName, mapper).withTxnsPerBatch(10).withBatchSize(100).withIdleTimeout(10).withKerberosKeytab(args[4]).withKerberosPrincipal(args[5]);
    } else {
        hiveOptions = new HiveOptions(metaStoreURI, dbName, tblName, mapper).withTxnsPerBatch(10).withBatchSize(100).withIdleTimeout(10).withMaxOpenConnections(1);
    }
    HiveBolt hiveBolt = new HiveBolt(hiveOptions);
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(USER_SPOUT_ID, spout, 1);
    // UserDataSpout --> HiveBolt
    builder.setBolt(BOLT_ID, hiveBolt, 1).shuffleGrouping(USER_SPOUT_ID);
    if (args.length == 3) {
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology())) {
            waitForSeconds(20);
        }
        System.exit(0);
    } else if (args.length >= 4) {
        StormSubmitter.submitTopology(args[3], config, builder.createTopology());
    } else {
        System.out.println("Usage: HiveTopology metastoreURI dbName tableName [topologyNamey] [keytab file] [principal name]");
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) Fields(org.apache.storm.tuple.Fields) TopologyBuilder(org.apache.storm.topology.TopologyBuilder) Config(org.apache.storm.Config) DelimitedRecordHiveMapper(org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper) HiveOptions(org.apache.storm.hive.common.HiveOptions) LocalTopology(org.apache.storm.LocalCluster.LocalTopology)
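
UserDataSpout itself is not shown in this snippet; the topology only requires that it emit tuples whose fields match the columns handed to DelimitedRecordHiveMapper. A minimal sketch of such a spout, assuming Storm's standard BaseRichSpout API; the class name, sample values, and emit rate are illustrative, not the project's actual implementation:

import java.util.Map;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.Utils;

// Hypothetical stand-in for the UserDataSpout used above: all HiveBolt needs from it
// is a stream whose declared fields match the DelimitedRecordHiveMapper columns.
public class UserDataSpoutSketch extends BaseRichSpout {
    private SpoutOutputCollector collector;
    private int id = 0;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void nextTuple() {
        int i = id++;
        // One synthetic user record per call; HiveBolt groups these into Hive
        // transactions according to withTxnsPerBatch/withBatchSize.
        collector.emit(new Values(i, "user" + i, "555-010" + i, "1 Main St", "Springfield", "IL"));
        Utils.sleep(100);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("id", "name", "phone", "street", "city", "state"));
    }
}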

Example 2 with LocalCluster

Use of org.apache.storm.LocalCluster in project storm by apache.

From the class HiveTopologyPartitioned, method main.

public static void main(String[] args) throws Exception {
    String metaStoreURI = args[0];
    String dbName = args[1];
    String tblName = args[2];
    String[] partNames = { "city", "state" };
    String[] colNames = { "id", "name", "phone", "street" };
    Config config = new Config();
    config.setNumWorkers(1);
    UserDataSpout spout = new UserDataSpout();
    DelimitedRecordHiveMapper mapper = new DelimitedRecordHiveMapper().withColumnFields(new Fields(colNames)).withPartitionFields(new Fields(partNames));
    HiveOptions hiveOptions;
    if (args.length == 6) {
        hiveOptions = new HiveOptions(metaStoreURI, dbName, tblName, mapper).withTxnsPerBatch(10).withBatchSize(1000).withIdleTimeout(10).withKerberosKeytab(args[4]).withKerberosPrincipal(args[5]);
    } else {
        hiveOptions = new HiveOptions(metaStoreURI, dbName, tblName, mapper).withTxnsPerBatch(10).withBatchSize(1000).withIdleTimeout(10);
    }
    HiveBolt hiveBolt = new HiveBolt(hiveOptions);
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(USER_SPOUT_ID, spout, 1);
    // UserDataSpout --> HiveBolt
    builder.setBolt(BOLT_ID, hiveBolt, 1).shuffleGrouping(USER_SPOUT_ID);
    if (args.length == 3) {
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology(TOPOLOGY_NAME, config, builder.createTopology())) {
            waitForSeconds(20);
        }
        System.exit(0);
    } else if (args.length >= 4) {
        StormSubmitter.submitTopology(args[3], config, builder.createTopology());
    } else {
        System.out.println("Usage: HiveTopologyPartitioned metastoreURI dbName tableName [topologyNamey] [keytab file] [principal name]");
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) Fields(org.apache.storm.tuple.Fields) TopologyBuilder(org.apache.storm.topology.TopologyBuilder) Config(org.apache.storm.Config) DelimitedRecordHiveMapper(org.apache.storm.hive.bolt.mapper.DelimitedRecordHiveMapper) HiveOptions(org.apache.storm.hive.common.HiveOptions) LocalTopology(org.apache.storm.LocalCluster.LocalTopology)
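
HiveBolt writes through Hive's streaming ingest API, so the target table must be a bucketed, transactional ORC table whose columns and partition keys line up with the mapper above. A DDL sketch for this partitioned example, kept as a Java constant to stay in one language; the table name, bucketing column, and bucket count are assumptions rather than values from the project:

// Assumed Hive DDL matching the colNames/partNames arrays in the example above.
public final class UserTableDdl {
    public static final String CREATE_TABLE =
            "CREATE TABLE user_data (id INT, name STRING, phone STRING, street STRING) "
          + "PARTITIONED BY (city STRING, state STRING) "
          + "CLUSTERED BY (id) INTO 4 BUCKETS "
          + "STORED AS ORC "
          + "TBLPROPERTIES ('transactional' = 'true')";

    private UserTableDdl() {
    }
}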

Example 3 with LocalCluster

Use of org.apache.storm.LocalCluster in project storm by apache.

From the class AbstractUserTopology, method execute.

public void execute(String[] args) throws Exception {
    if (args.length != 4 && args.length != 5) {
        System.out.println("Usage: " + this.getClass().getSimpleName() + " <dataSourceClassName> <dataSource.url> " + "<user> <password> [topology name]");
        System.exit(-1);
    }
    Map map = Maps.newHashMap();
    // e.g. com.mysql.jdbc.jdbc2.optional.MysqlDataSource
    map.put("dataSourceClassName", args[0]);
    // e.g. jdbc:mysql://localhost/test
    map.put("dataSource.url", args[1]);
    // e.g. root
    map.put("dataSource.user", args[2]);
    if (args.length == 4) {
        // password (only set when no topology name is given)
        map.put("dataSource.password", args[3]);
    }
    Config config = new Config();
    config.put(JDBC_CONF, map);
    ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
    connectionProvider.prepare();
    int queryTimeoutSecs = 60;
    JdbcClient jdbcClient = new JdbcClient(connectionProvider, queryTimeoutSecs);
    for (String sql : setupSqls) {
        jdbcClient.executeSql(sql);
    }
    this.userSpout = new UserSpout();
    this.jdbcMapper = new SimpleJdbcMapper(TABLE_NAME, connectionProvider);
    connectionProvider.cleanup();
    Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
    List<Column> queryParamColumns = Lists.newArrayList(new Column("user_id", Types.INTEGER));
    this.jdbcLookupMapper = new SimpleJdbcLookupMapper(outputFields, queryParamColumns);
    this.connectionProvider = new HikariCPConnectionProvider(map);
    if (args.length == 4) {
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology("test", config, getTopology())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    } else {
        StormSubmitter.submitTopology(args[4], config, getTopology());
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) UserSpout(org.apache.storm.jdbc.spout.UserSpout) Config(org.apache.storm.Config) JdbcClient(org.apache.storm.jdbc.common.JdbcClient) LocalTopology(org.apache.storm.LocalCluster.LocalTopology) HikariCPConnectionProvider(org.apache.storm.jdbc.common.HikariCPConnectionProvider) ConnectionProvider(org.apache.storm.jdbc.common.ConnectionProvider) HikariCPConnectionProvider(org.apache.storm.jdbc.common.HikariCPConnectionProvider) SimpleJdbcLookupMapper(org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper) Fields(org.apache.storm.tuple.Fields) SimpleJdbcMapper(org.apache.storm.jdbc.mapper.SimpleJdbcMapper) Column(org.apache.storm.jdbc.common.Column) Map(java.util.Map)
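
getTopology() is abstract here and implemented by the concrete storm-jdbc example topologies. A hedged sketch of what a persistence-style subclass could look like, assuming the userSpout, jdbcMapper, and connectionProvider fields prepared in execute() are visible to subclasses and that TABLE_NAME is in scope; the component ids and parallelism are illustrative:

import org.apache.storm.generated.StormTopology;
import org.apache.storm.jdbc.bolt.JdbcInsertBolt;
import org.apache.storm.topology.TopologyBuilder;

// Hypothetical concrete subclass; the real storm-jdbc examples follow the same shape.
public class UserPersistenceTopologySketch extends AbstractUserTopology {
    private static final String USER_SPOUT = "USER_SPOUT";
    private static final String INSERT_BOLT = "INSERT_BOLT";

    @Override
    public StormTopology getTopology() {
        // Reuse the spout, mapper, and connection provider prepared in execute().
        JdbcInsertBolt insertBolt = new JdbcInsertBolt(connectionProvider, jdbcMapper)
                .withTableName(TABLE_NAME)
                .withQueryTimeoutSecs(30);
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout(USER_SPOUT, userSpout, 1);
        builder.setBolt(INSERT_BOLT, insertBolt, 1).shuffleGrouping(USER_SPOUT);
        return builder.createTopology();
    }
}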

Example 4 with LocalCluster

Use of org.apache.storm.LocalCluster in project storm by apache.

From the class WordCountTridentRedisClusterMap, method main.

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) 127.0.0.1:6379,127.0.0.1:6380");
        System.exit(1);
    }
    Integer flag = Integer.valueOf(args[0]);
    String redisHostPort = args[1];
    Config conf = new Config();
    conf.setMaxSpoutPending(5);
    if (flag == 0) {
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort))) {
            Thread.sleep(60 * 1000);
        }
        System.exit(0);
    } else if (flag == 1) {
        conf.setNumWorkers(3);
        StormSubmitter.submitTopology("test_wordCounter_for_redis", conf, buildTopology(redisHostPort));
    } else {
        System.out.println("Usage: WordCountTrident 0(storm-local)|1(storm-cluster) redis-host redis-port");
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) JedisClusterConfig(org.apache.storm.redis.common.config.JedisClusterConfig) Config(org.apache.storm.Config) LocalTopology(org.apache.storm.LocalCluster.LocalTopology)
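
buildTopology(redisHostPort) is not shown, but the comma-separated host:port list has to be turned into a JedisClusterConfig (listed under "Also used" above) at some point. A sketch of that parsing step; the helper class and method names are made up:

import java.net.InetSocketAddress;
import java.util.HashSet;
import java.util.Set;
import org.apache.storm.redis.common.config.JedisClusterConfig;

// Illustrative helper: turn "127.0.0.1:6379,127.0.0.1:6380" into a JedisClusterConfig.
public final class RedisClusterConfigSketch {
    public static JedisClusterConfig fromHostPortList(String redisHostPort) {
        Set<InetSocketAddress> nodes = new HashSet<>();
        for (String hostPort : redisHostPort.split(",")) {
            String[] parts = hostPort.split(":");
            nodes.add(new InetSocketAddress(parts[0], Integer.parseInt(parts[1])));
        }
        return new JedisClusterConfig.Builder().setNodes(nodes).build();
    }

    private RedisClusterConfigSketch() {
    }
}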

Example 5 with LocalCluster

Use of org.apache.storm.LocalCluster in project storm by apache.

From the class SolrTopology, method submitTopologyLocalCluster.

protected void submitTopologyLocalCluster(StormTopology topology, Config config) throws Exception {
    try (LocalCluster cluster = new LocalCluster();
        LocalTopology topo = cluster.submitTopology("test", config, topology)) {
        Thread.sleep(10000);
        System.out.println("Killing topology per client's request");
    }
    System.exit(0);
}
Also used : LocalCluster(org.apache.storm.LocalCluster) LocalTopology(org.apache.storm.LocalCluster.LocalTopology)
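
The try-with-resources block works because LocalCluster and the LocalTopology handle returned by submitTopology are AutoCloseable: closing them is what kills the topology and shuts the local cluster down. A sketch of the same lifecycle written out explicitly, as an alternative method body inside SolrTopology (killTopology and shutdown are standard LocalCluster calls):

protected void submitTopologyLocalClusterExplicit(StormTopology topology, Config config) throws Exception {
    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology("test", config, topology);
    Thread.sleep(10000);
    System.out.println("Killing topology per client's request");
    // Explicit teardown instead of relying on AutoCloseable close().
    cluster.killTopology("test");
    cluster.shutdown();
    System.exit(0);
}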

Aggregations

LocalCluster (org.apache.storm.LocalCluster): 76
Config (org.apache.storm.Config): 70
LocalTopology (org.apache.storm.LocalCluster.LocalTopology): 52
TopologyBuilder (org.apache.storm.topology.TopologyBuilder): 28
Fields (org.apache.storm.tuple.Fields): 22
Map (java.util.Map): 14
StreamBuilder (org.apache.storm.streams.StreamBuilder): 9
RandomIntegerSpout (org.apache.storm.starter.spout.RandomIntegerSpout): 7
HashMap (java.util.HashMap): 6
LocalDRPC (org.apache.storm.LocalDRPC): 6
JedisPoolConfig (org.apache.storm.redis.common.config.JedisPoolConfig): 6
TestWordSpout (org.apache.storm.testing.TestWordSpout): 5
FileInputStream (java.io.FileInputStream): 4
InputStream (java.io.InputStream): 4
StormSubmitter (org.apache.storm.StormSubmitter): 4
StormTopology (org.apache.storm.generated.StormTopology): 4
ValueMapper (org.apache.storm.streams.operations.mappers.ValueMapper): 4
TopologyContext (org.apache.storm.task.TopologyContext): 4
Utils (org.apache.storm.utils.Utils): 4
Yaml (org.yaml.snakeyaml.Yaml): 4