Example 16 with Stream

Use of org.apache.storm.trident.Stream in project storm by apache.

From class SampleOpenTsdbTridentTopology, method main:

public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbTridentTopology <tsdb-url>`");
    }
    String tsdbUrl = args[0];
    final OpenTsdbClient.Builder openTsdbClientBuilder = OpenTsdbClient.newBuilder(tsdbUrl);
    // State factory that writes each Trident batch to OpenTSDB using the default tuple-to-datapoint mapper.
    final OpenTsdbStateFactory openTsdbStateFactory = new OpenTsdbStateFactory(openTsdbClientBuilder, Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
    TridentTopology tridentTopology = new TridentTopology();
    final Stream stream = tridentTopology.newStream("metric-tsdb-stream", new MetricGenBatchSpout(10));
    // Log every tuple as it passes through, then persist each batch to OpenTSDB.
    stream.peek(new Consumer() {

        @Override
        public void accept(TridentTuple input) {
            LOG.info("########### Received tuple: [{}]", input);
        }
    }).partitionPersist(openTsdbStateFactory, MetricGenSpout.DEFAULT_METRIC_FIELDS, new OpenTsdbStateUpdater());
    Config conf = new Config();
    conf.setDebug(true);
    // Default topology name; an explicit name can be supplied as the second argument.
    String topoName = "word-count";
    if (args.length > 1) {
        topoName = args[1];
    }
    conf.setNumWorkers(3);
    StormSubmitter.submitTopologyWithProgressBar(topoName, conf, tridentTopology.build());
}
Also used: OpenTsdbClient(org.apache.storm.opentsdb.client.OpenTsdbClient) Consumer(org.apache.storm.trident.operation.Consumer) TridentTopology(org.apache.storm.trident.TridentTopology) Config(org.apache.storm.Config) OpenTsdbStateUpdater(org.apache.storm.opentsdb.trident.OpenTsdbStateUpdater) OpenTsdbStateFactory(org.apache.storm.opentsdb.trident.OpenTsdbStateFactory) Stream(org.apache.storm.trident.Stream) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)
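
For quick experiments without a running Storm cluster, the same topology can also be run in local mode, mirroring the LocalCluster pattern from Example 19 below (a sketch; the topology name "opentsdb-test" and the 30-second run window are illustrative assumptions):

Config conf = new Config();
conf.setMaxTaskParallelism(3);
// Run in an embedded local cluster for 30 seconds, then shut down.
try (LocalCluster cluster = new LocalCluster();
    LocalCluster.LocalTopology topo = cluster.submitTopology("opentsdb-test", conf, tridentTopology.build())) {
    Thread.sleep(30000);
}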

Example 17 with Stream

Use of org.apache.storm.trident.Stream in project storm by apache.

From class TridentKafkaTopology, method buildTopology:

private static StormTopology buildTopology(String brokerConnectionString) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4, new Values("storm", "1"), new Values("trident", "1"), new Values("needs", "1"), new Values("javadoc", "1"));
    spout.setCycle(true);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    // Kafka producer configuration used by the Trident state.
    Properties props = new Properties();
    props.put("bootstrap.servers", brokerConnectionString);
    props.put("acks", "1");
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    // Map the "word" field to the Kafka message key and "count" to the value, writing to topic "test".
    TridentKafkaStateFactory stateFactory = new TridentKafkaStateFactory().withProducerProperties(props).withKafkaTopicSelector(new DefaultTopicSelector("test")).withTridentTupleToKafkaMapper(new FieldNameBasedTupleToKafkaMapper("word", "count"));
    stream.partitionPersist(stateFactory, fields, new TridentKafkaUpdater(), new Fields());
    return topology.build();
}
Also used: FixedBatchSpout(org.apache.storm.trident.testing.FixedBatchSpout) Fields(org.apache.storm.tuple.Fields) TridentTopology(org.apache.storm.trident.TridentTopology) Values(org.apache.storm.tuple.Values) Stream(org.apache.storm.trident.Stream) DefaultTopicSelector(org.apache.storm.kafka.trident.selector.DefaultTopicSelector) Properties(java.util.Properties) FieldNameBasedTupleToKafkaMapper(org.apache.storm.kafka.trident.mapper.FieldNameBasedTupleToKafkaMapper)
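
buildTopology only constructs the StormTopology; a caller still has to submit it. A minimal hypothetical driver in the same class (buildTopology is private), reusing the LocalCluster pattern from Example 19; the broker address localhost:9092, topology name, and run duration are illustrative assumptions:

public static void main(String[] args) throws Exception {
    Config conf = new Config();
    // Cap in-flight batches so the cycling spout does not overrun the Kafka producer.
    conf.setMaxSpoutPending(5);
    try (LocalCluster cluster = new LocalCluster();
        LocalCluster.LocalTopology topo = cluster.submitTopology("kafka-writer", conf, buildTopology("localhost:9092"))) {
        Thread.sleep(60000);
    }
}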

Example 18 with Stream

Use of org.apache.storm.trident.Stream in project storm by apache.

From class TridentProjectRel, method tridentPlan:

@Override
public void tridentPlan(TridentPlanCreator planCreator) throws Exception {
    // SingleRel: plan the single input relation first, then take its stream off the planner's stack.
    RelNode input = getInput();
    StormRelUtils.getStormRelInput(input).tridentPlan(planCreator);
    Stream inputStream = planCreator.pop().toStream();
    String stageName = StormRelUtils.getStageName(this);
    String projectionClassName = StormRelUtils.getClassName(this);
    List<String> outputFieldNames = getRowType().getFieldNames();
    int outputCount = outputFieldNames.size();
    List<RexNode> childExps = getChildExps();
    RelDataType inputRowType = getInput(0).getRowType();
    // Compile the projection expressions into an executable instance, apply it per tuple with map(),
    // and rename the outputs to match the projected row type.
    ExecutableExpression projectionInstance = planCreator.createScalarInstance(childExps, inputRowType, projectionClassName);
    Stream finalStream = inputStream.map(new EvaluationFunction(projectionInstance, outputCount, planCreator.getDataContext()), new Fields(outputFieldNames)).name(stageName);
    planCreator.addStream(finalStream);
}
Also used: Fields(org.apache.storm.tuple.Fields) RelNode(org.apache.calcite.rel.RelNode) EvaluationFunction(org.apache.storm.sql.runtime.trident.functions.EvaluationFunction) Stream(org.apache.storm.trident.Stream) RelDataType(org.apache.calcite.rel.type.RelDataType) RexNode(org.apache.calcite.rex.RexNode) ExecutableExpression(org.apache.storm.sql.runtime.calcite.ExecutableExpression)
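
Stripped of the Storm SQL machinery, the core of the method is Trident's Stream.map(MapFunction, Fields) overload: evaluate an expression per tuple and rename the outputs. A standalone sketch of the same projection pattern, with a plain org.apache.storm.trident.operation.MapFunction standing in for the generated EvaluationFunction (the field name "word" is illustrative):

// Roughly SELECT word FROM input, expressed directly against the Stream API.
Stream projected = inputStream.map(new MapFunction() {

    @Override
    public Values execute(TridentTuple input) {
        // Keep only the first field of each tuple.
        return new Values(input.getValue(0));
    }
}, new Fields("word")).name("projection-stage");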

Example 19 with Stream

Use of org.apache.storm.trident.Stream in project storm by apache.

From class SampleDruidBoltTridentTopology, method main:

public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        throw new IllegalArgumentException("There should be at least one argument. Run as `SampleDruidBoltTridentTopology <zk-url>`");
    }
    TridentTopology tridentTopology = new TridentTopology();
    // Beam factory and event mapper that turn Trident tuples into Druid events.
    DruidBeamFactory druidBeamFactory = new SampleDruidBeamFactoryImpl(new HashMap<String, Object>());
    ITupleDruidEventMapper<Map<String, Object>> eventMapper = new TupleDruidEventMapper<>(TupleDruidEventMapper.DEFAULT_FIELD_NAME);
    final Stream stream = tridentTopology.newStream("batch-event-gen", new SimpleBatchSpout(10));
    stream.peek(new Consumer() {

        @Override
        public void accept(TridentTuple input) {
            LOG.info("########### Received tuple: [{}]", input);
        }
    }).partitionPersist(new DruidBeamStateFactory<Map<String, Object>>(druidBeamFactory, eventMapper), new Fields("event"), new DruidBeamStateUpdater());
    Config conf = new Config();
    conf.setDebug(true);
    conf.put("druid.tranquility.zk.connect", args[0]);
    if (args.length > 1) {
        // A second argument names the topology and triggers submission to a real cluster.
        conf.setNumWorkers(3);
        StormSubmitter.submitTopologyWithProgressBar(args[1], conf, tridentTopology.build());
    } else {
        // Otherwise run in an embedded local cluster for 30 seconds.
        conf.setMaxTaskParallelism(3);
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology("druid-test", conf, tridentTopology.build())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    }
}
Also used: LocalCluster(org.apache.storm.LocalCluster) Config(org.apache.storm.Config) DruidBeamFactory(org.apache.storm.druid.bolt.DruidBeamFactory) DruidBeamStateUpdater(org.apache.storm.druid.trident.DruidBeamStateUpdater) LocalTopology(org.apache.storm.LocalCluster.LocalTopology) Fields(org.apache.storm.tuple.Fields) Consumer(org.apache.storm.trident.operation.Consumer) TridentTopology(org.apache.storm.trident.TridentTopology) TupleDruidEventMapper(org.apache.storm.druid.bolt.TupleDruidEventMapper) ITupleDruidEventMapper(org.apache.storm.druid.bolt.ITupleDruidEventMapper) Stream(org.apache.storm.trident.Stream) HashMap(java.util.HashMap) Map(java.util.Map) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)
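
The anonymous Consumer used for logging can also be swapped for Trident's builtin Debug filter, which logs every tuple it sees with a prefix and passes it through unchanged (an alternative sketch, not from the original class; Debug is org.apache.storm.trident.operation.builtin.Debug and the prefix "druid-input" is illustrative):

// Log each generated event before handing the batch to the Druid state updater.
stream.each(new Fields("event"), new Debug("druid-input")).partitionPersist(new DruidBeamStateFactory<Map<String, Object>>(druidBeamFactory, eventMapper), new Fields("event"), new DruidBeamStateUpdater());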

Example 20 with Stream

Use of org.apache.storm.trident.Stream in project storm by apache.

From class WordCountTrident, method buildTopology:

public static StormTopology buildTopology(String hbaseRoot) {
    Fields fields = new Fields("word", "count");
    FixedBatchSpout spout = new FixedBatchSpout(fields, 4, new Values("storm", 1), new Values("trident", 1), new Values("needs", 1), new Values("javadoc", 1));
    spout.setCycle(true);
    // Map tuples to HBase: row key and column from "word", counter column from "count", all under family "cf".
    TridentHBaseMapper tridentHBaseMapper = new SimpleTridentHBaseMapper().withColumnFamily("cf").withColumnFields(new Fields("word")).withCounterFields(new Fields("count")).withRowKeyField("word");
    HBaseValueMapper rowToStormValueMapper = new WordCountValueMapper();
    HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
    projectionCriteria.addColumn(new HBaseProjectionCriteria.ColumnMetaData("cf", "count"));
    HBaseState.Options options = new HBaseState.Options().withConfigKey(hbaseRoot).withDurability(Durability.SYNC_WAL).withMapper(tridentHBaseMapper).withProjectionCriteria(projectionCriteria).withRowToStormValueMapper(rowToStormValueMapper).withTableName("WordCount");
    StateFactory factory = new HBaseStateFactory(options);
    TridentTopology topology = new TridentTopology();
    Stream stream = topology.newStream("spout1", spout);
    stream.partitionPersist(factory, fields, new HBaseUpdater(), new Fields());
    // Query the persisted counts back from HBase and print each word's column value.
    TridentState state = topology.newStaticState(factory);
    stream = stream.stateQuery(state, new Fields("word"), new HBaseQuery(), new Fields("columnName", "columnValue"));
    stream.each(new Fields("word", "columnValue"), new PrintFunction(), new Fields());
    return topology.build();
}
Also used: HBaseUpdater(org.apache.storm.hbase.trident.state.HBaseUpdater) TridentState(org.apache.storm.trident.TridentState) Values(org.apache.storm.tuple.Values) HBaseProjectionCriteria(org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria) WordCountValueMapper(org.apache.storm.hbase.topology.WordCountValueMapper) HBaseValueMapper(org.apache.storm.hbase.bolt.mapper.HBaseValueMapper) HBaseStateFactory(org.apache.storm.hbase.trident.state.HBaseStateFactory) FixedBatchSpout(org.apache.storm.trident.testing.FixedBatchSpout) Fields(org.apache.storm.tuple.Fields) SimpleTridentHBaseMapper(org.apache.storm.hbase.trident.mapper.SimpleTridentHBaseMapper) StateFactory(org.apache.storm.trident.state.StateFactory) HBaseState(org.apache.storm.hbase.trident.state.HBaseState) TridentTopology(org.apache.storm.trident.TridentTopology) Stream(org.apache.storm.trident.Stream) TridentHBaseMapper(org.apache.storm.hbase.trident.mapper.TridentHBaseMapper) HBaseQuery(org.apache.storm.hbase.trident.state.HBaseQuery)
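
As with Example 17, a driver still has to supply the HBase client configuration and submit the topology. A hypothetical local-mode runner (the config key "hbase.conf", the empty configuration map, the topology name, and the run duration are illustrative; withConfigKey(hbaseRoot) above expects that key to resolve to an HBase configuration map in the topology Config):

Config conf = new Config();
conf.setMaxSpoutPending(5);
// The HBase state looks up its client configuration under this key.
conf.put("hbase.conf", new HashMap<String, Object>());
try (LocalCluster cluster = new LocalCluster();
    LocalCluster.LocalTopology topo = cluster.submitTopology("wordCounter", conf, buildTopology("hbase.conf"))) {
    Thread.sleep(60000);
}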

Aggregations

Stream (org.apache.storm.trident.Stream): 30
Fields (org.apache.storm.tuple.Fields): 27
TridentTopology (org.apache.storm.trident.TridentTopology): 25
TridentState (org.apache.storm.trident.TridentState): 13
FixedBatchSpout (org.apache.storm.trident.testing.FixedBatchSpout): 13
Values (org.apache.storm.tuple.Values): 13
StateFactory (org.apache.storm.trident.state.StateFactory): 12
RelNode (org.apache.calcite.rel.RelNode): 4
Consumer (org.apache.storm.trident.operation.Consumer): 4
Debug (org.apache.storm.trident.operation.builtin.Debug): 4
TridentTuple (org.apache.storm.trident.tuple.TridentTuple): 4
RelDataType (org.apache.calcite.rel.type.RelDataType): 3
RexNode (org.apache.calcite.rex.RexNode): 3
Config (org.apache.storm.Config): 3
MapGet (org.apache.storm.trident.operation.builtin.MapGet): 3
Sum (org.apache.storm.trident.operation.builtin.Sum): 3
FileInputStream (java.io.FileInputStream): 2
InputStream (java.io.InputStream): 2
InetSocketAddress (java.net.InetSocketAddress): 2
HashSet (java.util.HashSet): 2