Example 16 with Fields

use of backtype.storm.tuple.Fields in project storm-hbase by ypf412.

the class OutputTopology method main.

/**
 * HBase Data Output Topology
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    PropConfig pc = new PropConfig("storm.properties");
    int topoWorkers = Integer.valueOf(pc.getProperty("storm.topolopy.workers"));
    int spoutTasks = Integer.valueOf(pc.getProperty("storm.spout.tasks"));
    builder.setSpout("hbaseSpout", new HBaseSpout(), spoutTasks);
    int boltTasks = spoutTasks;
    builder.setBolt("outputBolt", new OutputBolt(), boltTasks).fieldsGrouping("hbaseSpout", new Fields("sharding"));
    Config conf = new Config();
    conf.put(Constants.STORM_PROP_CONF_FILE, "storm.properties");
    conf.put(Constants.HBASE_PROP_CONF_FILE, "hbase.properties");
    if (args != null && args.length > 0) {
        // run on storm cluster
        conf.setNumAckers(1);
        conf.setNumWorkers(topoWorkers);
        StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
    } else {
        // run on local cluster
        conf.setMaxTaskParallelism(3);
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("test", conf, builder.createTopology());
        Utils.sleep(10000);
        cluster.killTopology("test");
        cluster.shutdown();
    }
}
Also used : LocalCluster(backtype.storm.LocalCluster) HBaseSpout(ypf412.storm.spout.HBaseSpout) Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder) Config(backtype.storm.Config) PropConfig(ypf412.storm.util.PropConfig) OutputBolt(ypf412.storm.bolt.OutputBolt)
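
Note: fieldsGrouping("hbaseSpout", new Fields("sharding")) only routes correctly if the spout declares a field named "sharding" in declareOutputFields. The sketch below shows that declaration in a minimal spout; the class name, the extra fields, and the emitted dummy values are illustrative assumptions, not the actual HBaseSpout from ypf412/storm-hbase.

import java.util.Map;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

public class ShardingSpoutSketch extends BaseRichSpout {
    private SpoutOutputCollector collector;

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void nextTuple() {
        // Emit a dummy tuple; the first value maps to the "sharding" field declared below.
        collector.emit(new Values((byte) 0, "rowkey-0", "value-0"));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // The downstream fieldsGrouping hashes on "sharding", so all tuples with the
        // same sharding value are delivered to the same OutputBolt task.
        declarer.declare(new Fields("sharding", "rowkey", "value"));
    }
}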

Example 17 with Fields

use of backtype.storm.tuple.Fields in project heron by twitter.

the class WordCountTopology method main.

/**
 * Main method
 */
public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException {
    if (args.length < 1) {
        throw new RuntimeException("Specify topology name");
    }
    int parallelism = 1;
    if (args.length > 1) {
        parallelism = Integer.parseInt(args[1]);
    }
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("word", new WordSpout(), parallelism);
    builder.setBolt("consumer", new ConsumerBolt(), parallelism).fieldsGrouping("word", new Fields("word"));
    Config conf = new Config();
    conf.setNumWorkers(parallelism);
    StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
}
Also used : Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder) Config(backtype.storm.Config)
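
Because fieldsGrouping("word", new Fields("word")) sends every occurrence of a given word to the same bolt task, a per-task in-memory count stays consistent without any coordination. The counting bolt below is a rough sketch of that pattern under the backtype.storm compatibility API; it is not the actual ConsumerBolt from the heron example.

import java.util.HashMap;
import java.util.Map;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

public class CountingBoltSketch extends BaseBasicBolt {
    private final Map<String, Integer> counts = new HashMap<String, Integer>();

    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        // Task-local counting is safe here: the fields grouping guarantees that
        // all tuples carrying the same word arrive at this same task.
        String word = tuple.getStringByField("word");
        Integer count = counts.get(word);
        counts.put(word, count == null ? 1 : count + 1);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // Terminal bolt: nothing is emitted downstream.
    }
}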

Example 18 with Fields

use of backtype.storm.tuple.Fields in project storm-hbase by jrkinley.

the class HBaseCountersBatchTopology method main.

/**
 * @param args
 * @throws InterruptedException
 */
public static void main(String[] args) throws InterruptedException {
    // Add transactional spout
    MemoryTransactionalSpout spout = new MemoryTransactionalSpout(values, new Fields("shortid", "url", "user", "date"), 3);
    TransactionalTopologyBuilder builder = new TransactionalTopologyBuilder("shorturl-count", "spout", spout, 2);
    // Build TupleTableConfig
    TupleTableConfig ttConfig = new TupleTableConfig("shorturl", "shortid");
    ttConfig.setBatch(false);
    ttConfig.addColumn("data", "clicks");
    ttConfig.addColumn("daily", "date");
    builder.setBolt("hbase-counters", new HBaseCountersBatchBolt(ttConfig), 2).fieldsGrouping("spout", new Fields("shortid"));
    LocalCluster cluster = new LocalCluster();
    Config stormConfig = new Config();
    stormConfig.setDebug(true);
    stormConfig.setMaxSpoutPending(3);
    cluster.submitTopology("hbase-example", stormConfig, builder.buildTopology());
    Thread.sleep(10000);
    cluster.shutdown();
}
Also used : LocalCluster(backtype.storm.LocalCluster) MemoryTransactionalSpout(backtype.storm.testing.MemoryTransactionalSpout) Fields(backtype.storm.tuple.Fields) HBaseCountersBatchBolt(backtype.storm.contrib.hbase.bolts.HBaseCountersBatchBolt) Config(backtype.storm.Config) TupleTableConfig(backtype.storm.contrib.hbase.utils.TupleTableConfig) TransactionalTopologyBuilder(backtype.storm.transactional.TransactionalTopologyBuilder)
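
The values variable passed to MemoryTransactionalSpout is not shown in this snippet. That constructor takes a map of partition index to the tuples in that partition, shaped to match the declared Fields("shortid", "url", "user", "date"). The stand-in below sketches what such a map could look like; the class name and all data values are hypothetical, not taken from jrkinley/storm-hbase.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import backtype.storm.tuple.Values;

public final class ClickDataSketch {
    // One entry per spout partition; each inner list is a (shortid, url, user, date) tuple.
    public static final Map<Integer, List<List<Object>>> values = buildValues();

    private static Map<Integer, List<List<Object>>> buildValues() {
        Map<Integer, List<List<Object>>> partitions = new HashMap<Integer, List<List<Object>>>();
        List<List<Object>> partition0 = new ArrayList<List<Object>>();
        partition0.add(new Values("aaaa", "http://example.com/a", "alice", "20140101"));
        partition0.add(new Values("bbbb", "http://example.com/b", "bob", "20140101"));
        partitions.put(0, partition0);
        List<List<Object>> partition1 = new ArrayList<List<Object>>();
        partition1.add(new Values("aaaa", "http://example.com/a", "carol", "20140102"));
        partitions.put(1, partition1);
        return partitions;
    }
}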

Example 19 with Fields

use of backtype.storm.tuple.Fields in project jstorm by alibaba.

the class TestBackpressure method test.

/**
 * Can't run
 */
public static void test() {
    int spout_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 1);
    int split_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_SPLIT_PARALLELISM_HINT), 1);
    int count_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_COUNT_PARALLELISM_HINT), 2);
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("spout", new FastRandomSentenceSpout(), spout_Parallelism_hint);
    builder.setBolt("split", new SplitSentence(), split_Parallelism_hint).shuffleGrouping("spout");
    builder.setBolt("count", new WordCount(), count_Parallelism_hint).fieldsGrouping("split", new Fields("word"));
    String[] className = Thread.currentThread().getStackTrace()[1].getClassName().split("\\.");
    String topologyName = className[className.length - 1];
    isLocal = JStormHelper.localMode(conf);
    conf.put(ConfigExtension.TOPOLOGY_BACKPRESSURE_ENABLE, true);
    conf.setNumWorkers(8);
    try {
        JStormHelper.runTopology(builder.createTopology(), topologyName, conf, 60 * 2, new Validator(conf), isLocal);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        Assert.fail("Failed");
    }
}
Also used : Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder)
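
For fieldsGrouping("split", new Fields("word")) to work in this topology (and in the nearly identical Example 20 below), the split bolt must declare "word" as an output field. A minimal split bolt sketch under that assumption follows; it is illustrative and may differ from the SplitSentence class in the jstorm examples.

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class SplitSentenceSketch extends BaseBasicBolt {
    @Override
    public void execute(Tuple tuple, BasicOutputCollector collector) {
        // Emit one tuple per word; each tuple carries a single "word" field.
        for (String word : tuple.getString(0).split("\\s+")) {
            collector.emit(new Values(word));
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // "word" is the field the downstream fieldsGrouping hashes on.
        declarer.declare(new Fields("word"));
    }
}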

Example 20 with Fields

use of backtype.storm.tuple.Fields in project jstorm by alibaba.

the class FastWordCountTopology method test.

public static void test() {
    int spout_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_SPOUT_PARALLELISM_HINT), 1);
    int split_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_SPLIT_PARALLELISM_HINT), 1);
    int count_Parallelism_hint = JStormUtils.parseInt(conf.get(TOPOLOGY_COUNT_PARALLELISM_HINT), 2);
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("spout", new FastRandomSentenceSpout(), spout_Parallelism_hint);
    builder.setBolt("split", new SplitSentence(), split_Parallelism_hint).localOrShuffleGrouping("spout");
    builder.setBolt("count", new WordCount(), count_Parallelism_hint).fieldsGrouping("split", new Fields("word"));
    String[] className = Thread.currentThread().getStackTrace()[1].getClassName().split("\\.");
    String topologyName = className[className.length - 1];
    isLocal = JStormHelper.localMode(conf);
    try {
        JStormHelper.runTopology(builder.createTopology(), topologyName, conf, 60, new JStormHelper.CheckAckedFail(conf), isLocal);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail("Failed");
    }
}
Also used : JStormHelper(com.alibaba.starter.utils.JStormHelper) Fields(backtype.storm.tuple.Fields) TopologyBuilder(backtype.storm.topology.TopologyBuilder)

Aggregations

Fields (backtype.storm.tuple.Fields): 130 usages
TopologyBuilder (backtype.storm.topology.TopologyBuilder): 41 usages
Config (backtype.storm.Config): 24 usages
TridentTopology (storm.trident.TridentTopology): 21 usages
Map (java.util.Map): 20 usages
HashMap (java.util.HashMap): 18 usages
Test (org.junit.Test): 17 usages
JStormHelper (com.alibaba.starter.utils.JStormHelper): 16 usages
Values (backtype.storm.tuple.Values): 15 usages
ArrayList (java.util.ArrayList): 13 usages
LocalCluster (backtype.storm.LocalCluster): 12 usages
Stream (storm.trident.Stream): 12 usages
StreamInfo (backtype.storm.generated.StreamInfo): 10 usages
FixedBatchSpout (storm.trident.testing.FixedBatchSpout): 9 usages
HashSet (java.util.HashSet): 8 usages
LocalDRPC (backtype.storm.LocalDRPC): 7 usages
TridentState (storm.trident.TridentState): 7 usages
Count (storm.trident.operation.builtin.Count): 7 usages
GroupedStream (storm.trident.fluent.GroupedStream): 6 usages
IAggregatableStream (storm.trident.fluent.IAggregatableStream): 6 usages