Use of org.apache.storm.trident.tuple.TridentTuple in the Apache Storm project: class TestRedisDataSourcesProvider, method testRedisClusterSink.
@SuppressWarnings("unchecked")
@Test
public void testRedisClusterSink() {
    // Builds a Trident data source from a Redis Cluster URI and verifies that the
    // consumer is wired with the cluster-specific state factory/updater, that each
    // tuple is written via JedisCluster.hset, and that the shared cluster
    // connection is never closed by the updater.
    // FIX: dropped the unnecessary `throws IOException` clause for consistency with
    // testRedisSink, which performs the identical SERIALIZER.write(...) call
    // without declaring it.
    ISqlTridentDataSource ds = DataSourcesRegistry.constructTridentDataSource(
            URI.create("redis://localhost:6380"), null, null, CLUSTER_TBL_PROPERTIES, FIELDS);
    Assert.assertNotNull(ds);

    ISqlTridentDataSource.SqlTridentConsumer consumer = ds.getConsumer();
    Assert.assertEquals(RedisClusterState.Factory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(RedisClusterStateUpdater.class, consumer.getStateUpdater().getClass());

    RedisClusterState state = (RedisClusterState)
            consumer.getStateFactory().makeState(Collections.emptyMap(), null, 0, 1);
    StateUpdater stateUpdater = consumer.getStateUpdater();

    // Swap in a mocked JedisCluster so no real Redis connection is needed.
    JedisCluster mockJedisCluster = mock(JedisCluster.class);
    Whitebox.setInternalState(state, "jedisCluster", mockJedisCluster);

    List<TridentTuple> tupleList = mockTupleList();
    stateUpdater.updateState(state, tupleList, null);
    for (TridentTuple t : tupleList) {
        // PK goes to the key
        String id = String.valueOf(t.getValueByField("ID"));
        // NOTE(review): new String(byte[]) uses the platform charset; assumes the
        // updater serializes the same way — confirm against RedisClusterStateUpdater.
        String serializedValue = new String(SERIALIZER.write(t.getValues(), null).array());
        verify(mockJedisCluster).hset(eq(ADDITIONAL_KEY), eq(id), eq(serializedValue));
    }
    // The cluster connection is shared state; the updater must not close it.
    verify(mockJedisCluster, never()).close();
}
Use of org.apache.storm.trident.tuple.TridentTuple in the Apache Storm project: class TestRedisDataSourcesProvider, method testRedisSink.
@SuppressWarnings("unchecked")
@Test
public void testRedisSink() {
    // Single-node Redis sink: verify the factory/updater wiring, then check that
    // every tuple is hset through the pipeline, the pipeline is synced, and the
    // pooled Jedis connection is returned (closed) afterwards.
    ISqlTridentDataSource dataSource = DataSourcesRegistry.constructTridentDataSource(
            URI.create("redis://:foobared@localhost:6380/2"), null, null, TBL_PROPERTIES, FIELDS);
    Assert.assertNotNull(dataSource);

    ISqlTridentDataSource.SqlTridentConsumer consumer = dataSource.getConsumer();
    Assert.assertEquals(RedisState.Factory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(RedisStateUpdater.class, consumer.getStateUpdater().getClass());

    RedisState redisState = (RedisState)
            consumer.getStateFactory().makeState(Collections.emptyMap(), null, 0, 1);
    StateUpdater updater = consumer.getStateUpdater();

    // Stub out the whole pool -> connection -> pipeline chain.
    JedisPool pool = mock(JedisPool.class);
    Jedis jedis = mock(Jedis.class);
    Pipeline pipeline = mock(Pipeline.class);
    Whitebox.setInternalState(redisState, "jedisPool", pool);
    when(pool.getResource()).thenReturn(jedis);
    when(jedis.pipelined()).thenReturn(pipeline);

    List<TridentTuple> tuples = mockTupleList();
    updater.updateState(redisState, tuples, null);
    for (TridentTuple tuple : tuples) {
        // PK goes to the key
        String key = String.valueOf(tuple.getValueByField("ID"));
        String value = new String(SERIALIZER.write(tuple.getValues(), null).array());
        verify(pipeline).hset(eq(ADDITIONAL_KEY), eq(key), eq(value));
    }
    verify(pipeline).sync();
    verify(jedis).close();
}
Use of org.apache.storm.trident.tuple.TridentTuple in the Apache Storm project: class TestKafkaDataSourcesProvider, method testKafkaSink.
@SuppressWarnings("unchecked")
@Test
public void testKafkaSink() {
    // Kafka sink: verify that the data source exposes the Trident Kafka
    // factory/updater pair and that each tuple is sent exactly once through
    // the (mocked) producer.
    ISqlTridentDataSource dataSource = DataSourcesRegistry.constructTridentDataSource(
            URI.create("kafka://mock?topic=foo"), null, null, TBL_PROPERTIES, FIELDS);
    Assert.assertNotNull(dataSource);

    ISqlTridentDataSource.SqlTridentConsumer consumer = dataSource.getConsumer();
    Assert.assertEquals(TridentKafkaStateFactory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(TridentKafkaUpdater.class, consumer.getStateUpdater().getClass());

    TridentKafkaState kafkaState = (TridentKafkaState)
            consumer.getStateFactory().makeState(Collections.emptyMap(), null, 0, 1);

    // Replace the real producer with a mock whose send(...) returns a dummy Future.
    KafkaProducer mockProducer = mock(KafkaProducer.class);
    doReturn(mock(Future.class)).when(mockProducer).send(any(ProducerRecord.class));
    Whitebox.setInternalState(kafkaState, "producer", mockProducer);

    for (TridentTuple tuple : mockTupleList()) {
        kafkaState.updateState(Collections.singletonList(tuple), null);
        verify(mockProducer).send(argThat(new KafkaMessageMatcher(tuple)));
    }
    verifyNoMoreInteractions(mockProducer);
}
Use of org.apache.storm.trident.tuple.TridentTuple in the Apache Storm project: class TestMongoDataSourcesProvider, method testMongoSink.
@SuppressWarnings("unchecked")
@Test
public void testMongoSink() {
    // Mongo sink: verify the factory/updater wiring and that each tuple results
    // in exactly one insert on the (mocked) Mongo client.
    ISqlTridentDataSource dataSource = DataSourcesRegistry.constructTridentDataSource(
            URI.create("mongodb://127.0.0.1:27017/test"), null, null, TBL_PROPERTIES, FIELDS);
    Assert.assertNotNull(dataSource);

    ISqlTridentDataSource.SqlTridentConsumer consumer = dataSource.getConsumer();
    Assert.assertEquals(MongoStateFactory.class, consumer.getStateFactory().getClass());
    Assert.assertEquals(MongoStateUpdater.class, consumer.getStateUpdater().getClass());

    MongoState mongoState = (MongoState)
            consumer.getStateFactory().makeState(Collections.emptyMap(), null, 0, 1);
    StateUpdater updater = consumer.getStateUpdater();

    // Inject a mocked client so no real MongoDB instance is required.
    MongoDBClient client = mock(MongoDBClient.class);
    Whitebox.setInternalState(mongoState, "mongoClient", client);

    List<TridentTuple> tuples = mockTupleList();
    for (TridentTuple tuple : tuples) {
        updater.updateState(mongoState, Collections.singletonList(tuple), null);
        verify(client).insert(argThat(new MongoArgMatcher(tuple)), eq(true));
    }
    verifyNoMoreInteractions(client);
}
Use of org.apache.storm.trident.tuple.TridentTuple in the Apache Storm project: class SampleDruidBoltTridentTopology, method main.
public static void main(String[] args) throws Exception {
    // Usage: SampleDruidBoltTridentTopology <zk-url> [topology-name]
    // With one argument the topology runs on a local cluster for 30 seconds;
    // with a second argument it is submitted to a real cluster under that name.
    if (args.length == 0) {
        throw new IllegalArgumentException("There should be at least one argument. Run as `SampleDruidBoltTridentTopology <zk-url>`");
    }

    TridentTopology topology = new TridentTopology();
    DruidBeamFactory beamFactory = new SampleDruidBeamFactoryImpl(new HashMap<String, Object>());
    ITupleDruidEventMapper<Map<String, Object>> mapper =
            new TupleDruidEventMapper<>(TupleDruidEventMapper.DEFAULT_FIELD_NAME);

    // Log every generated tuple, then persist it to Druid through the beam state.
    final Stream eventStream = topology.newStream("batch-event-gen", new SimpleBatchSpout(10));
    eventStream.peek(new Consumer() {
        @Override
        public void accept(TridentTuple input) {
            LOG.info("########### Received tuple: [{}]", input);
        }
    }).partitionPersist(new DruidBeamStateFactory<Map<String, Object>>(beamFactory, mapper),
            new Fields("event"), new DruidBeamStateUpdater());

    Config conf = new Config();
    conf.setDebug(true);
    conf.put("druid.tranquility.zk.connect", args[0]);

    if (args.length > 1) {
        conf.setNumWorkers(3);
        StormSubmitter.submitTopologyWithProgressBar(args[1], conf, topology.build());
    } else {
        conf.setMaxTaskParallelism(3);
        try (LocalCluster cluster = new LocalCluster();
             LocalTopology localTopo = cluster.submitTopology("druid-test", conf, topology.build())) {
            Thread.sleep(30000);
        }
        System.exit(0);
    }
}
Aggregations