
Example 1 with TridentTuple

Use of org.apache.storm.trident.tuple.TridentTuple in project storm by apache.

The class SampleOpenTsdbTridentTopology, method main.

public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        throw new IllegalArgumentException("There should be at least one argument. Run as `SampleOpenTsdbTridentTopology <tsdb-url>`");
    }
    String tsdbUrl = args[0];
    final OpenTsdbClient.Builder openTsdbClientBuilder = OpenTsdbClient.newBuilder(tsdbUrl);
    // State factory that writes tuples to OpenTSDB using the default tuple-to-datapoint mapper.
    final OpenTsdbStateFactory openTsdbStateFactory = new OpenTsdbStateFactory(openTsdbClientBuilder, Collections.singletonList(TupleOpenTsdbDatapointMapper.DEFAULT_MAPPER));
    TridentTopology tridentTopology = new TridentTopology();
    // Source stream: a batch spout that generates sample metric tuples.
    final Stream stream = tridentTopology.newStream("metric-tsdb-stream", new MetricGenBatchSpout(10));
    // Log each tuple as it passes, then persist every batch to OpenTSDB.
    stream.peek(new Consumer() {

        @Override
        public void accept(TridentTuple input) {
            LOG.info("########### Received tuple: [{}]", input);
        }
    }).partitionPersist(openTsdbStateFactory, MetricGenSpout.DEFAULT_METRIC_FIELDS, new OpenTsdbStateUpdater());
    Config conf = new Config();
    conf.setDebug(true);
    if (args.length > 1) {
        // Second argument present: submit to a real cluster under that topology name.
        conf.setNumWorkers(3);
        StormSubmitter.submitTopologyWithProgressBar(args[1], conf, tridentTopology.build());
    } else {
        // No topology name given: run on an in-process LocalCluster for 30 seconds, then exit.
        conf.setMaxTaskParallelism(3);
        try (LocalCluster cluster = new LocalCluster();
            LocalTopology topo = cluster.submitTopology("word-count", conf, tridentTopology.build())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    }
}
Also used : LocalCluster(org.apache.storm.LocalCluster) Config(org.apache.storm.Config) OpenTsdbStateFactory(org.apache.storm.opentsdb.trident.OpenTsdbStateFactory) LocalTopology(org.apache.storm.LocalCluster.LocalTopology) OpenTsdbClient(org.apache.storm.opentsdb.client.OpenTsdbClient) Consumer(org.apache.storm.trident.operation.Consumer) TridentTopology(org.apache.storm.trident.TridentTopology) OpenTsdbStateUpdater(org.apache.storm.opentsdb.trident.OpenTsdbStateUpdater) Stream(org.apache.storm.trident.Stream) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)
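
Since Consumer declares a single accept(TridentTuple) method, the anonymous class above can typically be replaced with a lambda. A minimal sketch, assuming the same stream, openTsdbStateFactory, and LOG as in the example:

// Sketch only: lambda form of the peek/partitionPersist wiring shown above.
stream.peek(input -> LOG.info("########### Received tuple: [{}]", input))
      .partitionPersist(openTsdbStateFactory, MetricGenSpout.DEFAULT_METRIC_FIELDS, new OpenTsdbStateUpdater());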

Example 2 with TridentTuple

Use of org.apache.storm.trident.tuple.TridentTuple in project storm by apache.

The class TridentWindowingInmemoryStoreTopology, method buildTopology.

public static StormTopology buildTopology(WindowsStoreFactory windowStore, WindowConfig windowConfig) throws Exception {
    FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
        new Values("the cow jumped over the moon"),
        new Values("the man went to the store and bought some candy"),
        new Values("four score and seven years ago"),
        new Values("how many apples can you eat"),
        new Values("to be or not to be the person"));
    spout.setCycle(true);
    TridentTopology topology = new TridentTopology();
    // Split each sentence into words, count them per window, and log every windowed count tuple.
    Stream stream = topology.newStream("spout1", spout)
        .parallelismHint(16)
        .each(new Fields("sentence"), new Split(), new Fields("word"))
        .window(windowConfig, windowStore, new Fields("word"), new CountAsAggregator(), new Fields("count"))
        .peek(new Consumer() {

            @Override
            public void accept(TridentTuple input) {
                LOG.info("Received tuple: [{}]", input);
            }
        });
    return topology.build();
}
Also used : FixedBatchSpout(org.apache.storm.trident.testing.FixedBatchSpout) Fields(org.apache.storm.tuple.Fields) Consumer(org.apache.storm.trident.operation.Consumer) TridentTopology(org.apache.storm.trident.TridentTopology) CountAsAggregator(org.apache.storm.trident.testing.CountAsAggregator) Values(org.apache.storm.tuple.Values) Stream(org.apache.storm.trident.Stream) Split(org.apache.storm.trident.testing.Split) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)
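
The Split function used above comes from org.apache.storm.trident.testing. A minimal sketch of what such a split function typically looks like, assuming org.apache.storm.trident.operation.BaseFunction and TridentCollector (the real class may differ in detail):

// Sketch: emit one output tuple per whitespace-separated word of the input sentence.
public class Split extends BaseFunction {

    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        for (String word : tuple.getString(0).split(" ")) {
            collector.emit(new Values(word));
        }
    }
}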

Example 3 with TridentTuple

Use of org.apache.storm.trident.tuple.TridentTuple in project storm by apache.

The class TestHdfsDataSourcesProvider, method testHdfsSink.

@SuppressWarnings("unchecked")
@Test
public void testHdfsSink() {
    ISqlTridentDataSource ds = DataSourcesRegistry.constructTridentDataSource(URI.create(hdfsURI), null, null, TBL_PROPERTIES, FIELDS);
    Assert.assertNotNull(ds);
    ISqlTridentDataSource.SqlTridentConsumer consumer = ds.getConsumer();
    Assert.assertEquals(HdfsStateFactory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(HdfsUpdater.class, consumer.getStateUpdater().getClass());
    HdfsState state = (HdfsState) consumer.getStateFactory().makeState(Collections.emptyMap(), null, 0, 1);
    StateUpdater stateUpdater = consumer.getStateUpdater();
    // Replace the state's file options with a mock so writes can be verified without a real HDFS.
    HdfsFileOptions options = mock(HdfsFileOptions.class);
    Whitebox.setInternalState(state, "options", options);
    List<TridentTuple> tupleList = mockTupleList();
    for (TridentTuple t : tupleList) {
        // Each single-tuple batch pushed through the updater should reach the mocked options.
        stateUpdater.updateState(state, Collections.singletonList(t), null);
        try {
            verify(options).execute(Collections.singletonList(t));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
Also used : ISqlTridentDataSource(org.apache.storm.sql.runtime.ISqlTridentDataSource) IOException(java.io.IOException) HdfsState(org.apache.storm.hdfs.trident.HdfsState) StateUpdater(org.apache.storm.trident.state.StateUpdater) HdfsFileOptions(org.apache.storm.hdfs.trident.HdfsState.HdfsFileOptions) TridentTuple(org.apache.storm.trident.tuple.TridentTuple) Test(org.junit.Test)
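
The loop above exercises the delegation path from the state updater down to the file options: an HDFS state updater typically just forwards each batch to HdfsState, which executes it against its configured options. A hedged sketch of that pattern, assuming org.apache.storm.trident.state.BaseStateUpdater (SketchHdfsUpdater is a hypothetical name; the real HdfsUpdater may differ in detail):

// Sketch: forward the batch of TridentTuples to the HDFS state, which writes it via its FileOptions.
public class SketchHdfsUpdater extends BaseStateUpdater<HdfsState> {

    @Override
    public void updateState(HdfsState state, List<TridentTuple> tuples, TridentCollector collector) {
        state.updateState(tuples, collector);
    }
}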

Example 4 with TridentTuple

Use of org.apache.storm.trident.tuple.TridentTuple in project storm by apache.

The class TestHdfsDataSourcesProvider, method mockTupleList.

private static List<TridentTuple> mockTupleList() {
    List<TridentTuple> tupleList = new ArrayList<>();
    TridentTuple t0 = mock(TridentTuple.class);
    TridentTuple t1 = mock(TridentTuple.class);
    doReturn(1).when(t0).get(0);
    doReturn(2).when(t1).get(0);
    doReturn(Lists.<Object>newArrayList(1, "2")).when(t0).getValues();
    doReturn(Lists.<Object>newArrayList(2, "3")).when(t1).getValues();
    tupleList.add(t0);
    tupleList.add(t1);
    return tupleList;
}
Also used : ArrayList(java.util.ArrayList) TridentTuple(org.apache.storm.trident.tuple.TridentTuple)
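
The helper stubs only get(0) and getValues(), so any other TridentTuple method on these mocks returns Mockito defaults. A small hypothetical usage of the canned values:

// Hypothetical check of what the stubbed mocks return.
List<TridentTuple> tuples = mockTupleList();
Object firstField = tuples.get(0).get(0);              // 1, from doReturn(1).when(t0).get(0)
List<Object> secondValues = tuples.get(1).getValues(); // [2, "3"], from the getValues() stub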

Example 5 with TridentTuple

Use of org.apache.storm.trident.tuple.TridentTuple in project storm by apache.

The class TestKafkaDataSourcesProvider, method mockTupleList.

private static List<TridentTuple> mockTupleList() {
    List<TridentTuple> tupleList = new ArrayList<>();
    TridentTuple t0 = mock(TridentTuple.class);
    TridentTuple t1 = mock(TridentTuple.class);
    doReturn(1).when(t0).get(0);
    doReturn(2).when(t1).get(0);
    doReturn(Lists.<Object>newArrayList(1, "2")).when(t0).getValues();
    doReturn(Lists.<Object>newArrayList(2, "3")).when(t1).getValues();
    tupleList.add(t0);
    tupleList.add(t1);
    return tupleList;
}
Also used : TridentTuple(org.apache.storm.trident.tuple.TridentTuple)

Aggregations

TridentTuple (org.apache.storm.trident.tuple.TridentTuple) 46
ArrayList (java.util.ArrayList) 18
FailedException (org.apache.storm.topology.FailedException) 11
List (java.util.List) 10
Values (org.apache.storm.tuple.Values) 8
ISqlTridentDataSource (org.apache.storm.sql.runtime.ISqlTridentDataSource) 6
Test (org.junit.Test) 6
HashMap (java.util.HashMap) 5
TridentTopology (org.apache.storm.trident.TridentTopology) 5
Consumer (org.apache.storm.trident.operation.Consumer) 5
StateUpdater (org.apache.storm.trident.state.StateUpdater) 5
Stream (org.apache.storm.trident.Stream) 4
Fields (org.apache.storm.tuple.Fields) 4
Map (java.util.Map) 3
FixedBatchSpout (org.apache.storm.trident.testing.FixedBatchSpout) 3
BatchStatement (com.datastax.driver.core.BatchStatement) 2
Statement (com.datastax.driver.core.Statement) 2
IOException (java.io.IOException) 2
Future (java.util.concurrent.Future) 2
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord) 2