Usage example of org.apache.flink.cep.nfa.NFA from the Apache Flink project:
class CEPOperatorTest, method testCEPOperatorSerializationWRocksDB.
@Test
public void testCEPOperatorSerializationWRocksDB() throws Exception {
    // End-to-end check that the CEP operator's state (the NFA plus buffered,
    // not-yet-sorted events) round-trips correctly through the RocksDB state
    // backend, which serializes/deserializes state on every access.
    String rocksDbPath = tempFolder.newFolder().getAbsolutePath();
    RocksDBStateBackend rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
    rocksDBStateBackend.setDbStoragePath(rocksDbPath);

    // All events share id 40, so they land on the same key / keyed state.
    final Event startEvent1 = new Event(40, "start", 1.0);
    final Event startEvent2 = new Event(40, "start", 2.0);
    final SubEvent middleEvent1 = new SubEvent(40, "foo1", 1.0, 10);
    final SubEvent middleEvent2 = new SubEvent(40, "foo2", 2.0, 10);
    final SubEvent middleEvent3 = new SubEvent(40, "foo3", 3.0, 10);
    final SubEvent middleEvent4 = new SubEvent(40, "foo4", 1.0, 10);
    final Event nextOne = new Event(40, "next-one", 1.0);
    final Event endEvent = new Event(40, "end", 1.0);

    // Pattern: "start" -> one-or-more "middle" SubEvents (combinations
    // allowed) -> "end". The middle condition is iterative: it accepts a
    // SubEvent named "foo*" only while the running price sum of the events
    // already matched for "middle", plus this event, stays below 5.0.
    // NOTE: the conditions are deliberately anonymous classes with explicit
    // serialVersionUIDs — this test exercises serialization, so do not
    // convert them to lambdas.
    final Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").subtype(SubEvent.class).where(new IterativeCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value, Context<SubEvent> ctx) throws Exception {
            if (!value.getName().startsWith("foo")) {
                return false;
            }
            // Sum the prices of all events matched so far for "middle";
            // requires looking back into the (RocksDB-held) partial match.
            double sum = 0.0;
            for (Event event : ctx.getEventsForPattern("middle")) {
                sum += event.getPrice();
            }
            sum += value.getPrice();
            return Double.compare(sum, 5.0) < 0;
        }
    }).oneOrMore().allowCombinations().followedBy("end").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });

    CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOperator(false, new NFACompiler.NFAFactory<Event>() {
        private static final long serialVersionUID = 477082663248051994L;

        @Override
        public NFA<Event> createNFA() {
            // 'false' presumably disables timeout handling — TODO confirm
            // against NFACompiler.compileFactory's signature.
            return NFACompiler.compileFactory(pattern, false).createNFA();
        }
    });
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.setStateBackend(rocksDBStateBackend);
        harness.open();
        // Elements arrive out of timestamp order (e.g. ts 5 before ts 3,
        // ts 6 after ts 7/8); the operator must buffer and sort them by
        // event time, emitting matches only as watermarks advance.
        harness.processWatermark(0L);
        harness.processElement(new StreamRecord<>(startEvent1, 1));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
        harness.processWatermark(2L);
        harness.processElement(new StreamRecord<Event>(middleEvent3, 5));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 3));
        harness.processElement(new StreamRecord<>(startEvent2, 4));
        harness.processWatermark(5L);
        harness.processElement(new StreamRecord<>(nextOne, 7));
        harness.processElement(new StreamRecord<>(endEvent, 8));
        harness.processElement(new StreamRecord<Event>(middleEvent4, 6));
        // Watermark far past all elements flushes every pending match.
        harness.processWatermark(100L);

        // Flatten each emitted match (a Map of pattern-name -> events) into
        // a single event list, skipping interleaved watermarks.
        List<List<Event>> resultingPatterns = new ArrayList<>();
        while (!harness.getOutput().isEmpty()) {
            Object o = harness.getOutput().poll();
            if (!(o instanceof Watermark)) {
                StreamRecord<Map<String, List<Event>>> el = (StreamRecord<Map<String, List<Event>>>) o;
                List<Event> res = new ArrayList<>();
                for (List<Event> le : el.getValue().values()) {
                    res.addAll(le);
                }
                resultingPatterns.add(res);
            }
        }
        // Expected: every start/middle-combination/end whose middle price
        // sum stays below 5.0. The in-list event order reflects map
        // iteration over pattern names, not event time.
        compareMaps(resultingPatterns, Lists.<List<Event>>newArrayList(Lists.newArrayList(startEvent1, endEvent, middleEvent1, middleEvent2, middleEvent4), Lists.newArrayList(startEvent1, endEvent, middleEvent2, middleEvent1), Lists.newArrayList(startEvent1, endEvent, middleEvent3, middleEvent1), Lists.newArrayList(startEvent2, endEvent, middleEvent3, middleEvent4), Lists.newArrayList(startEvent1, endEvent, middleEvent4, middleEvent1), Lists.newArrayList(startEvent1, endEvent, middleEvent1), Lists.newArrayList(startEvent2, endEvent, middleEvent3)));
    } finally {
        harness.close();
    }
}
Usage example of org.apache.flink.cep.nfa.NFA from the Apache Flink project:
class AbstractKeyedCEPPatternOperator, method restoreState.
////////////////////// Backwards Compatibility //////////////////////

/**
 * Restores operator state written by a legacy (pre-1.x keyed-state) checkpoint.
 *
 * <p>Two legacy layouts are supported:
 * <ul>
 *   <li>old <em>keyed</em> operator: a count followed by that many keys,
 *       each written with {@code keySerializer};</li>
 *   <li>old <em>non-keyed</em> operator: a Java-serialized {@link NFA}
 *       followed by the pending priority-queue elements, all re-registered
 *       under the dummy key {@code (byte) 0}.</li>
 * </ul>
 *
 * @param in the raw checkpoint stream positioned at this operator's state
 * @throws Exception on any read/deserialization failure
 */
@Override
public void restoreState(FSDataInputStream in) throws Exception {
    // Skip the flag byte indicating whether udf state follows
    // (not needed here); the returned value is intentionally discarded.
    in.read();
    DataInputViewStreamWrapper inputView = new DataInputViewStreamWrapper(in);
    InternalWatermarkCallbackService<KEY> watermarkCallbackService = getInternalWatermarkCallbackService();
    if (migratingFromOldKeyedOperator) {
        // Legacy keyed layout: re-register every stored key for watermark callbacks.
        int numberEntries = inputView.readInt();
        for (int i = 0; i < numberEntries; i++) {
            watermarkCallbackService.registerKeyForWatermarkCallback(keySerializer.deserialize(inputView));
        }
    } else {
        // Legacy non-keyed layout. try-with-resources closes the stream even
        // when deserialization throws (the previous code only closed it on
        // the success path, leaking it on exceptions).
        try (ObjectInputStream ois = new ObjectInputStream(in)) {
            // retrieve the NFA
            @SuppressWarnings("unchecked")
            NFA<IN> nfa = (NFA<IN>) ois.readObject();
            // retrieve the elements that were pending in the priority queue
            MultiplexingStreamRecordSerializer<IN> recordSerializer = new MultiplexingStreamRecordSerializer<>(inputSerializer);
            PriorityQueue<StreamRecord<IN>> priorityQueue = priorityQueueFactory.createPriorityQueue();
            int entries = ois.readInt();
            for (int i = 0; i < entries; i++) {
                // NOTE(review): the count is read via ois while the records are
                // read via inputView — both wrap the same underlying stream and
                // this mixed access is preserved exactly as written.
                StreamElement streamElement = recordSerializer.deserialize(inputView);
                priorityQueue.offer(streamElement.<IN>asRecord());
            }
            // finally register the retrieved state with the new keyed state,
            // under a single dummy key since the old operator was non-keyed.
            setCurrentKey((byte) 0);
            nfaOperatorState.update(nfa);
            priorityQueueOperatorState.update(priorityQueue);
            if (!isProcessingTime) {
                // this is relevant only for event/ingestion time
                // need to work around type restrictions (raw type on purpose:
                // the dummy key (byte) 0 is not of type KEY)
                InternalWatermarkCallbackService rawWatermarkCallbackService = (InternalWatermarkCallbackService) watermarkCallbackService;
                rawWatermarkCallbackService.registerKeyForWatermarkCallback((byte) 0);
            }
        }
    }
}
Aggregations