
Example 1 with Column

use of org.apache.storm.jdbc.common.Column in project storm by apache.

The class AbstractUserTopology, method execute:

public void execute(String[] args) throws Exception {
    if (args.length != 4 && args.length != 5) {
        System.out.println("Usage: " + this.getClass().getSimpleName() + " <dataSourceClassName> <dataSource.url> " + "<user> <password> [topology name]");
        System.exit(-1);
    }
    Map<String, Object> map = Maps.newHashMap();
    //com.mysql.jdbc.jdbc2.optional.MysqlDataSource
    map.put("dataSourceClassName", args[0]);
    //jdbc:mysql://localhost/test
    map.put("dataSource.url", args[1]);
    //root
    map.put("dataSource.user", args[2]);
    //password (args[3] is always present after the usage check above)
    map.put("dataSource.password", args[3]);
    Config config = new Config();
    config.put(JDBC_CONF, map);
    ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
    connectionProvider.prepare();
    int queryTimeoutSecs = 60;
    JdbcClient jdbcClient = new JdbcClient(connectionProvider, queryTimeoutSecs);
    for (String sql : setupSqls) {
        jdbcClient.executeSql(sql);
    }
    this.userSpout = new UserSpout();
    // SimpleJdbcMapper reads the table metadata through the prepared provider; a fresh
    // provider is created below for the topology components themselves.
    this.jdbcMapper = new SimpleJdbcMapper(TABLE_NAME, connectionProvider);
    connectionProvider.cleanup();
    Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
    List<Column> queryParamColumns = Lists.newArrayList(new Column("user_id", Types.INTEGER));
    this.jdbcLookupMapper = new SimpleJdbcLookupMapper(outputFields, queryParamColumns);
    this.connectionProvider = new HikariCPConnectionProvider(map);
    if (args.length == 4) {
        // No topology name supplied: run for 30 seconds on an in-process LocalCluster.
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology("test", config, getTopology())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    } else {
        // Topology name supplied as the fifth argument: submit to a running cluster.
        StormSubmitter.submitTopology(args[4], config, getTopology());
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) UserSpout(org.apache.storm.jdbc.spout.UserSpout) Config(org.apache.storm.Config) JdbcClient(org.apache.storm.jdbc.common.JdbcClient) LocalTopology(org.apache.storm.LocalCluster.LocalTopology) HikariCPConnectionProvider(org.apache.storm.jdbc.common.HikariCPConnectionProvider) ConnectionProvider(org.apache.storm.jdbc.common.ConnectionProvider) SimpleJdbcLookupMapper(org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper) Fields(org.apache.storm.tuple.Fields) SimpleJdbcMapper(org.apache.storm.jdbc.mapper.SimpleJdbcMapper) Column(org.apache.storm.jdbc.common.Column) Map(java.util.Map)
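
The HikariCP configuration keys and the JdbcClient calls used above can be exercised on their own before wiring a topology. Below is a minimal, hedged sketch: the class name JdbcClientSmokeTest, the connection values, and the CREATE TABLE statement are placeholders, while the HikariCPConnectionProvider and JdbcClient calls mirror the example.

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.jdbc.common.ConnectionProvider;
import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
import org.apache.storm.jdbc.common.JdbcClient;

public class JdbcClientSmokeTest {
    public static void main(String[] args) throws Exception {
        // HikariCP-style keys, mirroring the map built in execute(); values are placeholders.
        Map<String, Object> map = new HashMap<>();
        map.put("dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
        map.put("dataSource.url", "jdbc:mysql://localhost/test");
        map.put("dataSource.user", "root");
        map.put("dataSource.password", "password");

        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(map);
        // prepare() opens the pool and must run before the first statement.
        connectionProvider.prepare();
        try {
            JdbcClient jdbcClient = new JdbcClient(connectionProvider, 60);
            // Same executeSql() call the topology uses for its setup statements (placeholder DDL).
            jdbcClient.executeSql("CREATE TABLE IF NOT EXISTS user (user_id INTEGER, user_name VARCHAR(64), create_date DATE)");
        } finally {
            connectionProvider.cleanup();
        }
    }
}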

Example 2 with Column

use of org.apache.storm.jdbc.common.Column in project storm by apache.

The class UserPersistanceTridentTopology, method getTopology:

@Override
public StormTopology getTopology() {
    TridentTopology topology = new TridentTopology();
    JdbcState.Options options = new JdbcState.Options()
            .withConnectionProvider(connectionProvider)
            .withMapper(this.jdbcMapper)
            .withJdbcLookupMapper(new SimpleJdbcLookupMapper(new Fields("dept_name"),
                    Lists.newArrayList(new Column("user_id", Types.INTEGER))))
            .withTableName(TABLE_NAME)
            .withSelectQuery(SELECT_QUERY);
    JdbcStateFactory jdbcStateFactory = new JdbcStateFactory(options);
    Stream stream = topology.newStream("userSpout", new UserSpout());
    TridentState state = topology.newStaticState(jdbcStateFactory);
    stream = stream.stateQuery(state, new Fields("user_id", "user_name", "create_date"), new JdbcQuery(), new Fields("dept_name"));
    stream.partitionPersist(jdbcStateFactory, new Fields("user_id", "user_name", "dept_name", "create_date"), new JdbcUpdater(), new Fields());
    return topology.build();
}
Also used : JdbcState(org.apache.storm.jdbc.trident.state.JdbcState) JdbcQuery(org.apache.storm.jdbc.trident.state.JdbcQuery) SimpleJdbcLookupMapper(org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper) UserSpout(org.apache.storm.jdbc.spout.UserSpout) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Column(org.apache.storm.jdbc.common.Column) TridentState(org.apache.storm.trident.TridentState) JdbcStateFactory(org.apache.storm.jdbc.trident.state.JdbcStateFactory) Stream(org.apache.storm.trident.Stream) JdbcUpdater(org.apache.storm.jdbc.trident.state.JdbcUpdater)
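
The Options chain above can be read as two halves: the lookup mapper and select query serve the stateQuery() step, while the insert mapper and table name serve partitionPersist(). A hedged sketch of building the same state factory outside a topology follows; the buildStateFactory helper, the "user" table name, and the select string are assumptions standing in for TABLE_NAME and SELECT_QUERY.

import java.sql.Types;
import java.util.Map;

import com.google.common.collect.Lists;

import org.apache.storm.jdbc.common.Column;
import org.apache.storm.jdbc.common.ConnectionProvider;
import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
import org.apache.storm.jdbc.mapper.SimpleJdbcMapper;
import org.apache.storm.jdbc.trident.state.JdbcState;
import org.apache.storm.jdbc.trident.state.JdbcStateFactory;
import org.apache.storm.tuple.Fields;

public class JdbcStateFactoryBuilder {

    public static JdbcStateFactory buildStateFactory(Map<String, Object> hikariConfigMap) {
        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);
        // Example 1 prepares the provider before constructing SimpleJdbcMapper so it can read the table metadata.
        connectionProvider.prepare();

        // Used by partitionPersist(): maps each tuple onto the columns of the target table.
        SimpleJdbcMapper insertMapper = new SimpleJdbcMapper("user", connectionProvider);

        // Used by stateQuery(): user_id is bound to the '?' in the select query,
        // and the returned dept_name is appended to the emitted tuple.
        SimpleJdbcLookupMapper lookupMapper = new SimpleJdbcLookupMapper(
                new Fields("dept_name"),
                Lists.newArrayList(new Column("user_id", Types.INTEGER)));

        JdbcState.Options options = new JdbcState.Options()
                .withConnectionProvider(connectionProvider)
                .withMapper(insertMapper)
                .withJdbcLookupMapper(lookupMapper)
                .withTableName("user")
                .withSelectQuery("select dept_name from user_department where user_id = ?");

        return new JdbcStateFactory(options);
    }
}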

Example 3 with Column

use of org.apache.storm.jdbc.common.Column in project storm by apache.

The class JdbcLookupBolt, method process:

@Override
protected void process(Tuple tuple) {
    try {
        List<Column> columns = jdbcLookupMapper.getColumns(tuple);
        List<List<Column>> result = jdbcClient.select(this.selectQuery, columns);
        if (result != null && !result.isEmpty()) {
            for (List<Column> row : result) {
                List<Values> values = jdbcLookupMapper.toTuple(tuple, row);
                for (Values value : values) {
                    collector.emit(tuple, value);
                }
            }
        }
        this.collector.ack(tuple);
    } catch (Exception e) {
        this.collector.reportError(e);
        this.collector.fail(tuple);
    }
}
Also used : Column(org.apache.storm.jdbc.common.Column) Values(org.apache.storm.tuple.Values) List(java.util.List)
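
process() above is essentially a three-step lookup contract: the mapper builds query parameters from the tuple, the client runs the select, and the mapper turns each returned row back into output values. The same flow can be driven directly; in this hedged sketch the LookupFlow class and its lookup() helper are hypothetical, while the mapper and client calls are the ones the bolt uses.

import java.util.ArrayList;
import java.util.List;

import org.apache.storm.jdbc.common.Column;
import org.apache.storm.jdbc.common.JdbcClient;
import org.apache.storm.jdbc.mapper.JdbcLookupMapper;
import org.apache.storm.tuple.ITuple;
import org.apache.storm.tuple.Values;

public class LookupFlow {

    /** Runs one lookup for the given input tuple and collects every Values the bolt would emit. */
    public static List<Values> lookup(JdbcClient jdbcClient, JdbcLookupMapper jdbcLookupMapper,
                                      String selectQuery, ITuple input) {
        List<Values> emitted = new ArrayList<>();
        // 1. Bind tuple fields to the query parameters (e.g. user_id -> '?').
        List<Column> params = jdbcLookupMapper.getColumns(input);
        // 2. Execute the select; each inner list is one result row.
        List<List<Column>> rows = jdbcClient.select(selectQuery, params);
        if (rows != null) {
            for (List<Column> row : rows) {
                // 3. Merge the row with the input tuple into output values.
                emitted.addAll(jdbcLookupMapper.toTuple(input, row));
            }
        }
        return emitted;
    }
}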

Example 4 with Column

use of org.apache.storm.jdbc.common.Column in project storm by apache.

The class SimpleJdbcLookupMapper, method toTuple:

@Override
public List<Values> toTuple(ITuple input, List<Column> columns) {
    Values values = new Values();
    for (String field : outputFields) {
        if (input.contains(field)) {
            values.add(input.getValueByField(field));
        } else {
            for (Column column : columns) {
                if (column.getColumnName().equalsIgnoreCase(field)) {
                    values.add(column.getVal());
                }
            }
        }
    }
    List<Values> result = new ArrayList<Values>();
    result.add(values);
    return result;
}
Also used : Column(org.apache.storm.jdbc.common.Column) Values(org.apache.storm.tuple.Values) ArrayList(java.util.ArrayList)
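
Concretely, with the mapper built in Example 1 (output fields user_id, user_name, dept_name, create_date and a single user_id query parameter), a looked-up row only has to supply the fields the input tuple lacks. A short sketch of that construction; the LookupMapperExample class is hypothetical and the merge result is described in comments, since an ITuple cannot easily be built outside a topology.

import java.sql.Types;

import com.google.common.collect.Lists;

import org.apache.storm.jdbc.common.Column;
import org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper;
import org.apache.storm.tuple.Fields;

public class LookupMapperExample {

    public static SimpleJdbcLookupMapper deptLookupMapper() {
        // Output fields, in emit order: user_id, user_name and create_date are copied from the
        // input tuple; dept_name is filled from the matching Column of the database row.
        Fields outputFields = new Fields("user_id", "user_name", "dept_name", "create_date");
        // Query parameters bound to the select query's '?' placeholders.
        return new SimpleJdbcLookupMapper(outputFields,
                Lists.newArrayList(new Column("user_id", Types.INTEGER)));
        // toTuple(input, row) then yields a single Values of
        // [input.user_id, input.user_name, row.dept_name, input.create_date].
    }
}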

Example 5 with Column

use of org.apache.storm.jdbc.common.Column in project storm by apache.

The class SimpleJdbcMapper, method getColumns:

@Override
public List<Column> getColumns(ITuple tuple) {
    List<Column> columns = new ArrayList<Column>();
    for (Column column : schemaColumns) {
        String columnName = column.getColumnName();
        Integer columnSqlType = column.getSqlType();
        if (Util.getJavaType(columnSqlType).equals(String.class)) {
            String value = tuple.getStringByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Short.class)) {
            Short value = tuple.getShortByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Integer.class)) {
            Integer value = tuple.getIntegerByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Long.class)) {
            Long value = tuple.getLongByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Double.class)) {
            Double value = tuple.getDoubleByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Float.class)) {
            Float value = tuple.getFloatByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Boolean.class)) {
            Boolean value = tuple.getBooleanByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(byte[].class)) {
            byte[] value = tuple.getBinaryByField(columnName);
            columns.add(new Column(columnName, value, columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Date.class)) {
            Long value = tuple.getLongByField(columnName);
            columns.add(new Column(columnName, new Date(value), columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Time.class)) {
            Long value = tuple.getLongByField(columnName);
            columns.add(new Column(columnName, new Time(value), columnSqlType));
        } else if (Util.getJavaType(columnSqlType).equals(Timestamp.class)) {
            Long value = tuple.getLongByField(columnName);
            columns.add(new Column(columnName, new Timestamp(value), columnSqlType));
        } else {
            throw new RuntimeException("Unsupported java type in tuple " + Util.getJavaType(columnSqlType));
        }
    }
    return columns;
}
Also used : ArrayList(java.util.ArrayList) Time(java.sql.Time) Timestamp(java.sql.Timestamp) Date(java.sql.Date) Column(org.apache.storm.jdbc.common.Column)
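
A practical consequence of this mapping: a spout feeding SimpleJdbcMapper must emit every field under the column's name, with the Java type that Util.getJavaType() reports for the column's SQL type, and DATE/TIME/TIMESTAMP columns travel through the tuple as epoch-millisecond longs. A hedged sketch for a hypothetical (user_id INTEGER, user_name VARCHAR, create_date DATE) schema; the UserRows class and its sample values are illustrative only.

import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

public class UserRows {

    // Field names must match the column names SimpleJdbcMapper reads from the table schema.
    public static final Fields USER_FIELDS = new Fields("user_id", "user_name", "create_date");

    /** One tuple's worth of values for the assumed (INTEGER, VARCHAR, DATE) columns. */
    public static Values sampleRow() {
        return new Values(
                1,                           // user_id: Integer, read via getIntegerByField()
                "peter",                     // user_name: String, read via getStringByField()
                System.currentTimeMillis()); // create_date: Long millis, wrapped into java.sql.Date
    }
}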

Aggregations

Column (org.apache.storm.jdbc.common.Column) 9
ArrayList (java.util.ArrayList) 3
ConnectionProvider (org.apache.storm.jdbc.common.ConnectionProvider) 3
HikariCPConnectionProvider (org.apache.storm.jdbc.common.HikariCPConnectionProvider) 3
SimpleJdbcLookupMapper (org.apache.storm.jdbc.mapper.SimpleJdbcLookupMapper) 3
SimpleJdbcMapper (org.apache.storm.jdbc.mapper.SimpleJdbcMapper) 3
Fields (org.apache.storm.tuple.Fields) 3
Values (org.apache.storm.tuple.Values) 3
List (java.util.List) 2
JdbcMapper (org.apache.storm.jdbc.mapper.JdbcMapper) 2
UserSpout (org.apache.storm.jdbc.spout.UserSpout) 2
Test (org.junit.Test) 2
Date (java.sql.Date) 1
Time (java.sql.Time) 1
Timestamp (java.sql.Timestamp) 1
Map (java.util.Map) 1
Config (org.apache.storm.Config) 1
LocalCluster (org.apache.storm.LocalCluster) 1
LocalTopology (org.apache.storm.LocalCluster.LocalTopology) 1
JdbcInsertBolt (org.apache.storm.jdbc.bolt.JdbcInsertBolt) 1