
Example 1 with PropConfig

Use of ypf412.storm.util.PropConfig in project storm-hbase by ypf412.

The class OutputTopology, method main:

/**
 * HBase Data Output Topology: reads rows via HBaseSpout and emits them through OutputBolt.
 * @param args if non-empty, args[0] names the topology and it is submitted to the Storm cluster; otherwise the topology runs on a local cluster
 * @throws Exception if configuration loading or topology submission fails
 */
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    PropConfig pc = new PropConfig("storm.properties");
    int topoWorkers = Integer.valueOf(pc.getProperty("storm.topolopy.workers"));
    int spoutTasks = Integer.valueOf(pc.getProperty("storm.spout.tasks"));
    builder.setSpout("hbaseSpout", new HBaseSpout(), spoutTasks);
    int boltTasks = spoutTasks;
    builder.setBolt("outputBolt", new OutputBolt(), boltTasks).fieldsGrouping("hbaseSpout", new Fields("sharding"));
    Config conf = new Config();
    conf.put(Constants.STORM_PROP_CONF_FILE, "storm.properties");
    conf.put(Constants.HBASE_PROP_CONF_FILE, "hbase.properties");
    if (args != null && args.length > 0) {
        // run on storm cluster
        conf.setNumAckers(1);
        conf.setNumWorkers(topoWorkers);
        StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
    } else {
        // run on local cluster
        conf.setMaxTaskParallelism(3);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", conf, builder.createTopology());
        Utils.sleep(10000);
        cluster.killTopology("test");
        cluster.shutdown();
    }
}
Also used : LocalCluster(backtype.storm.LocalCluster) HBaseSpout(ypf412.storm.spout.HBaseSpout) Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder) Config(backtype.storm.Config) PropConfig(ypf412.storm.util.PropConfig) OutputBolt(ypf412.storm.bolt.OutputBolt)
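
The PropConfig implementation itself is not shown on this page. For orientation, here is a minimal sketch of what such a wrapper might look like, assuming it wraps java.util.Properties, loads the named file from the classpath, and returns null for missing keys (consistent with the null checks in the examples on this page). This is an illustration, not the project's actual code.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

// Hypothetical stand-in for ypf412.storm.util.PropConfig -- an assumption, not the real class.
public class PropConfigSketch {

    private final Properties props = new Properties();

    public PropConfigSketch(String fileName) throws IOException {
        // The examples pass bare file names like "storm.properties",
        // so we assume the file is resolved from the classpath.
        try (InputStream in = getClass().getClassLoader().getResourceAsStream(fileName)) {
            if (in == null) {
                throw new IOException("properties file not found on classpath: " + fileName);
            }
            props.load(in);
        }
    }

    public String getProperty(String key) {
        // Returns null for absent keys, matching the callers' null checks.
        return props.getProperty(key);
    }
}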

Example 2 with PropConfig

Use of ypf412.storm.util.PropConfig in project storm-hbase by ypf412.

The class HBaseSpout, method open:

@Override
public void open(@SuppressWarnings("rawtypes") Map conf, TopologyContext context, SpoutOutputCollector collector) {
    this.collector = collector;
    String stormPropPath = (String) conf.get(Constants.STORM_PROP_CONF_FILE);
    PropConfig stormPropConfig;
    try {
        stormPropConfig = new PropConfig(stormPropPath);
    } catch (IOException e1) {
        LOG.error("Failed to load properties", e1);
        throw new RuntimeException(e1);
    }
    int queueSize = Constants.STORM_SPOUT_DEFAULT_QUEUE_SIZE;
    String queueSizeStr = stormPropConfig.getProperty("storm.spout.queue_size");
    if (queueSizeStr != null)
        queueSize = Integer.valueOf(queueSizeStr);
    this.queue = new LinkedBlockingQueue<Values>(queueSize);
    String startTs = stormPropConfig.getProperty("spout.start_timestamp");
    if (startTs != null) {
        // scan the HBase table from the specified timestamp
        this.startTimestamp = Integer.valueOf(startTs);
    } else {
        // 0 is a sentinel meaning "scan from 3 minutes ago"
        this.startTimestamp = 0;
    }
    String stopTs = stormPropConfig.getProperty("spout.stop_timestamp");
    if (stopTs != null) {
        // scan the HBase table until the specified timestamp
        this.stopTimestamp = Integer.valueOf(stopTs);
    } else {
        // 0 is a sentinel meaning "scan until now"
        this.stopTimestamp = 0;
    }
    String hbasePropPath = (String) conf.get(Constants.HBASE_PROP_CONF_FILE);
    PropConfig hbasePropConfig;
    try {
        hbasePropConfig = new PropConfig(hbasePropPath);
    } catch (IOException e1) {
        LOG.error("Failed to load properties", e1);
        throw new RuntimeException(e1);
    }
    short shardingNum = Constants.HBASE_DEFAULT_SHARDING_NUM;
    String shardingStr = hbasePropConfig.getProperty("hbase.table.sharding");
    if (shardingStr != null)
        shardingNum = Short.valueOf(shardingStr);
    // Based on this task's id, decide which sharding partitions it handles,
    // and start a daemon scanner thread for each of them (see the illustration below).
    int taskId = context.getThisTaskId();
    String componentId = context.getComponentId(taskId);
    int taskNum = context.getComponentTasks(componentId).size();
    int rem = taskId % taskNum;
    try {
        for (short shardingKey = 0; shardingKey < shardingNum; shardingKey++) {
            if (shardingKey % taskNum == rem) {
                Thread scanThread = new Thread(new ShardScanner(shardingKey, hbasePropConfig));
                scanThread.setDaemon(true);
                scanThread.start();
            }
        }
    } catch (Exception e) {
        LOG.error("failed to create scan thread", e);
        throw new RuntimeException(e);
    }
}
Also used : PropConfig(ypf412.storm.util.PropConfig) Values(backtype.storm.tuple.Values) IOException(java.io.IOException)
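
The shard-to-task assignment in open() is a plain modulo partition: the task whose taskId leaves remainder rem modulo the task count scans exactly the shards whose keys leave the same remainder. A standalone illustration with assumed values (taskNum = 2, shardingNum = 8; neither comes from the project's configuration):

public class ShardAssignmentDemo {

    public static void main(String[] args) {
        int taskNum = 2;     // number of spout tasks (assumed)
        int shardingNum = 8; // hbase.table.sharding (assumed)
        for (int rem = 0; rem < taskNum; rem++) {
            StringBuilder shards = new StringBuilder();
            for (short shardingKey = 0; shardingKey < shardingNum; shardingKey++) {
                if (shardingKey % taskNum == rem) {
                    shards.append(shardingKey).append(' ');
                }
            }
            // Prints: rem=0 -> shards 0 2 4 6, rem=1 -> shards 1 3 5 7
            System.out.println("task with rem=" + rem + " scans shards: " + shards.toString().trim());
        }
    }
}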

Example 3 with PropConfig

Use of ypf412.storm.util.PropConfig in project storm-hbase by ypf412.

The class HBaseSpoutTest, method testHBaseSpout:

@Test
public void testHBaseSpout() throws Exception {
    hbaseUtil.cleanupTestDir();
    HBaseTestUtil.writeLocalHBaseXml(hbaseUtil);
    PropConfig hbasePropConfig;
    try {
        hbasePropConfig = new PropConfig("hbase.properties");
    } catch (IOException e1) {
        throw new RuntimeException(e1);
    }
    String tableName = Constants.HBASE_DEFAULT_TABLE_NAME;
    String tableNameStr = hbasePropConfig.getProperty("hbase.table.name");
    if (tableNameStr != null && !tableNameStr.equals(""))
        tableName = tableNameStr;
    String columnFamily = Constants.HBASE_DEFAULT_COLUMN_FAMILY;
    String columnFamilyStr = hbasePropConfig.getProperty("hbase.table.column_family");
    if (columnFamilyStr != null && !columnFamilyStr.equals(""))
        columnFamily = columnFamilyStr;
    HTable htable = hbaseUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(columnFamily));
    HBaseTestUtil.loadStreamDataToHBase(ClassLoader.getSystemResource("datasource.txt").getPath(), htable, hbasePropConfig);
    int count = hbaseUtil.countRows(htable);
    assertTrue(count > 0);
    System.out.println("*** load " + count + " rows into hbase test table: " + tableName);
    stormUtil.getConfig().put(Constants.STORM_PROP_CONF_FILE, "storm.properties");
    stormUtil.getConfig().put(Constants.HBASE_PROP_CONF_FILE, "hbase.properties");
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("hbaseSpout", new HBaseSpout());
    StormTestBolt sinkBolt = new StormTestBolt();
    List<Object[]> tuples = StormTestUtil.loadTuples("datasource.txt");
    for (Object[] tuple : tuples) {
        sinkBolt.expectSeq(tuple);
    }
    builder.setBolt("sinkBolt", sinkBolt).fieldsGrouping("hbaseSpout", new Fields("sharding"));
    stormUtil.submitTopology(builder, 5000);
    hbaseUtil.deleteTable(Bytes.toBytes(tableName));
}
Also used : StormTestBolt(ypf412.storm.StormTestUtil.StormTestBolt) TopologyBuilder(backtype.storm.topology.TopologyBuilder) PropConfig(ypf412.storm.util.PropConfig) IOException(java.io.IOException) HTable(org.apache.hadoop.hbase.client.HTable) HBaseSpout(ypf412.storm.spout.HBaseSpout) Fields(backtype.storm.tuple.Fields) Test(org.junit.Test)
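
The test repeats the same fall-back idiom twice: use the configured value when present and non-empty, otherwise a default. A hypothetical helper (not part of storm-hbase) that captures the pattern:

// Hypothetical helper -- not part of the project.
static String getOrDefault(PropConfig pc, String key, String defaultValue) {
    String value = pc.getProperty(key);
    return (value != null && !value.isEmpty()) ? value : defaultValue;
}

With it, the table name lookup above would read:

String tableName = getOrDefault(hbasePropConfig, "hbase.table.name", Constants.HBASE_DEFAULT_TABLE_NAME);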

Example 4 with PropConfig

Use of ypf412.storm.util.PropConfig in project storm-hbase by ypf412.

The class DumpToHBaseTopology, method main (identical to Example 1 except that it wires a DumpToHBaseBolt in place of the OutputBolt):

/**
 * HBase Data Dump to Another HBase Table Topology: reads rows via HBaseSpout and writes them to another table through DumpToHBaseBolt.
 * @param args if non-empty, args[0] names the topology and it is submitted to the Storm cluster; otherwise the topology runs on a local cluster
 * @throws Exception if configuration loading or topology submission fails
 */
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    PropConfig pc = new PropConfig("storm.properties");
    int topoWorkers = Integer.valueOf(pc.getProperty("storm.topolopy.workers"));
    int spoutTasks = Integer.valueOf(pc.getProperty("storm.spout.tasks"));
    builder.setSpout("hbaseSpout", new HBaseSpout(), spoutTasks);
    int boltTasks = spoutTasks;
    builder.setBolt("dumpBolt", new DumpToHBaseBolt(), boltTasks).fieldsGrouping("hbaseSpout", new Fields("sharding"));
    Config conf = new Config();
    conf.put(Constants.STORM_PROP_CONF_FILE, "storm.properties");
    conf.put(Constants.HBASE_PROP_CONF_FILE, "hbase.properties");
    if (args != null && args.length > 0) {
        // run on storm cluster
        conf.setNumAckers(1);
        conf.setNumWorkers(topoWorkers);
        StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
    } else {
        // run on local cluster
        conf.setMaxTaskParallelism(3);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", conf, builder.createTopology());
        Utils.sleep(10000);
        cluster.killTopology("test");
        cluster.shutdown();
    }
}
Also used : DumpToHBaseBolt(ypf412.storm.bolt.DumpToHBaseBolt) LocalCluster(backtype.storm.LocalCluster) HBaseSpout(ypf412.storm.spout.HBaseSpout) Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder) Config(backtype.storm.Config) PropConfig(ypf412.storm.util.PropConfig)
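
Examples 1 and 4 read the same two keys from storm.properties, and Example 2 reads the queue and timestamp settings. A minimal storm.properties consistent with those lookups; the key names are copied verbatim from the code (including the "topolopy" spelling), but every value is an illustrative assumption:

# storm.properties -- illustrative values only
storm.topolopy.workers=2
storm.spout.tasks=2
storm.spout.queue_size=1000
# spout.start_timestamp and spout.stop_timestamp may be omitted;
# the spout then falls back to its 0 sentinels (see Example 2).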

Aggregations

PropConfig (ypf412.storm.util.PropConfig): 4 uses
TopologyBuilder (backtype.storm.topology.TopologyBuilder): 3 uses
Fields (backtype.storm.tuple.Fields): 3 uses
HBaseSpout (ypf412.storm.spout.HBaseSpout): 3 uses
Config (backtype.storm.Config): 2 uses
LocalCluster (backtype.storm.LocalCluster): 2 uses
IOException (java.io.IOException): 2 uses
Values (backtype.storm.tuple.Values): 1 use
HTable (org.apache.hadoop.hbase.client.HTable): 1 use
Test (org.junit.Test): 1 use
StormTestBolt (ypf412.storm.StormTestUtil.StormTestBolt): 1 use
DumpToHBaseBolt (ypf412.storm.bolt.DumpToHBaseBolt): 1 use
OutputBolt (ypf412.storm.bolt.OutputBolt): 1 use