Use of com.github.fhuss.kafka.streams.cep.Sequence in project kafkastreams-cep by fhussonnois.
From the class NFATest, method testComplexPatternWithState:
/**
* PATTERN SEQ(Stock+ a[ ], Stock b)
* WHERE skip_till_next_match(a[ ], b) {
* [symbol]
* and
* a[1].volume > 1000
* and
* a[i].price > avg(a[..i-1].price)
* and
* b.volume < 80%*a[a.LEN].volume }
* WITHIN 1 hour
*/
@Test
public void testComplexPatternWithState() {
    StockEvent e1 = new StockEvent("e1", 100, 1010);
    StockEvent e2 = new StockEvent("e2", 120, 990);
    StockEvent e3 = new StockEvent("e3", 120, 1005);
    StockEvent e4 = new StockEvent("e4", 121, 999);
    StockEvent e5 = new StockEvent("e5", 120, 999);
    StockEvent e6 = new StockEvent("e6", 125, 750);
    StockEvent e7 = new StockEvent("e7", 120, 950);
    StockEvent e8 = new StockEvent("e8", 120, 700);
    Pattern<String, StockEvent> pattern = new QueryBuilder<String, StockEvent>()
            // a[1]: volume > 1000; seed the "avg" state with its price
            .select()
                .where((k, v, ts, store) -> v.volume > 1000)
                .<Long>fold("avg", (k, v, curr) -> v.price)
            // a[i]: zero or more events priced above the running average; remember the last volume
            .then()
                .select().zeroOrMore().skipTillNextMatch()
                .where((k, v, ts, state) -> v.price > (long) state.get("avg"))
                .<Long>fold("avg", (k, v, curr) -> (curr + v.price) / 2)
                .<Long>fold("volume", (k, v, curr) -> v.volume)
            // b: volume below 80% of the last a[i] volume
            .then()
                .select().skipTillNextMatch()
                .where((k, v, ts, state) -> v.volume < 0.8 * (long) state.getOrElse("volume", 0L))
            .within(1, TimeUnit.HOURS)
            .build();
    final NFA<String, StockEvent> nfa = newNFA(pattern, Serdes.String(), new StockEventSerde());
    AtomicLong offset = new AtomicLong(0);
    List<Event<Object, StockEvent>> collect = Arrays.asList(new StockEvent[]{ e1, e2, e3, e4, e5, e6, e7, e8 })
            .stream()
            .map(e -> new Event<>(null, e, System.currentTimeMillis(), "test", 0, offset.getAndIncrement()))
            .collect(Collectors.toList());
    List<Sequence<String, StockEvent>> s = simulate(nfa, collect.toArray(new Event[collect.size()]));
    assertEquals(4, s.size());
}
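The StockEvent value type is not part of this snippet. A minimal sketch consistent with the constructor calls above and with the JSON records in the integration test below (public name, price and volume fields; the exact field types are an assumption inferred from the (long) casts in the predicates) could look like this:

// Minimal sketch of the event POJO these tests rely on; the field names follow
// new StockEvent("e1", 100, 1010) and the {"name":..,"price":..,"volume":..}
// records, while the long types are assumed.
public class StockEvent {

    public final String name;
    public final long price;
    public final long volume;

    public StockEvent(final String name, final long price, final long volume) {
        this.name = name;
        this.price = price;
        this.volume = volume;
    }
}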
Use of com.github.fhuss.kafka.streams.cep.Sequence in project kafkastreams-cep by fhussonnois.
From the class CEPStockKStreamsIntegrationTest, method test:
@Test
public void test() throws ExecutionException, InterruptedException {
    final Collection<KeyValue<String, String>> batch1 = Arrays.asList(
            new KeyValue<>(null, "{\"name\":\"e1\",\"price\":100,\"volume\":1010}"),
            new KeyValue<>(null, "{\"name\":\"e2\",\"price\":120,\"volume\":990}"),
            new KeyValue<>(null, "{\"name\":\"e3\",\"price\":120,\"volume\":1005}"),
            new KeyValue<>(null, "{\"name\":\"e4\",\"price\":121,\"volume\":999}"),
            new KeyValue<>(null, "{\"name\":\"e5\",\"price\":120,\"volume\":999}"),
            new KeyValue<>(null, "{\"name\":\"e6\",\"price\":125,\"volume\":750}"),
            new KeyValue<>(null, "{\"name\":\"e7\",\"price\":120,\"volume\":950}"),
            new KeyValue<>(null, "{\"name\":\"e8\",\"price\":120,\"volume\":700}"));
    IntegrationTestUtils.produceKeyValuesSynchronously(INPUT_STREAM, batch1,
            TestUtils.producerConfig(CLUSTER.bootstrapServers(), StringSerializer.class, StringSerializer.class, new Properties()),
            mockTime);

    // build the CEP query
    ComplexStreamsBuilder builder = new ComplexStreamsBuilder();
    CEPStream<String, StockEvent> stream = builder.stream(INPUT_STREAM);
    KStream<String, Sequence<String, StockEvent>> stocks = stream.query("Stocks", Patterns.STOCKS);

    // serialize each matched sequence as a JSON object keyed by stage index
    stocks.mapValues(seq -> {
        JSONObject json = new JSONObject();
        seq.asMap().forEach((k, v) -> {
            JSONArray events = new JSONArray();
            json.put(k, events);
            List<String> collect = v.stream().map(e -> e.value.name).collect(Collectors.toList());
            Collections.reverse(collect);
            collect.forEach(events::add);
        });
        return json.toJSONString();
    }).through(OUTPUT_STREAM, Produced.with(null, Serdes.String()))
      .print(Printed.toSysOut());

    Topology topology = builder.build();
    kafkaStreams = new KafkaStreams(topology, streamsConfiguration);
    kafkaStreams.start();

    final Properties consumerConfig = TestUtils.consumerConfig(CLUSTER.bootstrapServers(), StringDeserializer.class, StringDeserializer.class);
    List<KeyValue<String, String>> result = IntegrationTestUtils.readKeyValues(OUTPUT_STREAM, consumerConfig, TimeUnit.SECONDS.toMillis(10), 4);

    Assert.assertEquals(4, result.size());
    Assert.assertEquals("{\"0\":[\"e1\"],\"1\":[\"e2\",\"e3\",\"e4\",\"e5\"],\"2\":[\"e6\"]}", result.get(0).value);
    Assert.assertEquals("{\"0\":[\"e3\"],\"1\":[\"e4\"],\"2\":[\"e6\"]}", result.get(1).value);
    Assert.assertEquals("{\"0\":[\"e1\"],\"1\":[\"e2\",\"e3\",\"e4\",\"e5\",\"e6\",\"e7\"],\"2\":[\"e8\"]}", result.get(2).value);
    Assert.assertEquals("{\"0\":[\"e3\"],\"1\":[\"e4\",\"e6\"],\"2\":[\"e8\"]}", result.get(3).value);
}
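The Patterns.STOCKS constant passed to stream.query("Stocks", Patterns.STOCKS) is not shown in this snippet. A plausible sketch, simply reusing the QueryBuilder chain from testComplexPatternWithState above (the Patterns holder class itself is an assumption), would be:

// Hypothetical holder for the shared stock pattern; the builder chain mirrors
// the NFATest query: a[1] with volume > 1000, then a[i]* priced above the
// running average, then b with volume below 80% of the last a[i] volume,
// all within one hour.
public class Patterns {

    public static final Pattern<String, StockEvent> STOCKS = new QueryBuilder<String, StockEvent>()
            .select()
                .where((k, v, ts, store) -> v.volume > 1000)
                .<Long>fold("avg", (k, v, curr) -> v.price)
            .then()
                .select().zeroOrMore().skipTillNextMatch()
                .where((k, v, ts, state) -> v.price > (long) state.get("avg"))
                .<Long>fold("avg", (k, v, curr) -> (curr + v.price) / 2)
                .<Long>fold("volume", (k, v, curr) -> v.volume)
            .then()
                .select().skipTillNextMatch()
                .where((k, v, ts, state) -> v.volume < 0.8 * (long) state.getOrElse("volume", 0L))
            .within(1, TimeUnit.HOURS)
            .build();
}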