Use of org.apache.flink.cep.SubEvent in project flink by apache.
The class CEPMigrationTest, method writeAndOrSubtypConditionsPatternAfterMigrationSnapshot.
/**
 * Manually run this to write binary snapshot data.
 */
@Ignore
@Test
public void writeAndOrSubtypConditionsPatternAfterMigrationSnapshot() throws Exception {
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;

        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    final Event startEvent1 = new SubEvent(42, "start", 1.0, 6.0);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness =
        new KeyedOneInputStreamOperatorTestHarness<>(
            CepOperatorTestUtilities.getKeyedCepOperator(false, new NFAComplexConditionsFactory()),
            keySelector,
            BasicTypeInfo.INT_TYPE_INFO);
    try {
        harness.setup();
        harness.open();
        harness.processElement(new StreamRecord<>(startEvent1, 5));
        harness.processWatermark(new Watermark(6));
        // do snapshot and save to file
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        OperatorSnapshotUtil.writeStateHandle(
            snapshot,
            "src/test/resources/cep-migration-conditions-flink" + flinkGenerateSavepointVersion + "-snapshot");
    } finally {
        harness.close();
    }
}
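All of the examples on this page rely on the Event and SubEvent test POJOs from flink-cep's test sources, whose definitions are not shown in this excerpt. A minimal sketch of what they look like, with fields and accessors inferred from the constructor calls and getters used here (getId(), getName(), getVolume()) rather than copied from the Flink sources:

// Sketch of the test event types assumed by these examples; not the exact Flink classes.
public class Event {
    private final int id;
    private final String name;
    private final double price;

    public Event(int id, String name, double price) {
        this.id = id;
        this.name = name;
        this.price = price;
    }

    public int getId() { return id; }

    public String getName() { return name; }

    public double getPrice() { return price; }
}

public class SubEvent extends Event {
    private final double volume;

    public SubEvent(int id, String name, double price, double volume) {
        super(id, name, price);
        this.volume = volume;
    }

    public double getVolume() { return volume; }
}

SubEvent extending Event is what makes the subtype(SubEvent.class) steps in the patterns below meaningful: only elements of the subtype can satisfy those conditions.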
Use of org.apache.flink.cep.SubEvent in project flink by apache.
The class NFAITCase, method testBranchingPatternSkipTillNext.
// ///////////////////////////////////// Skip till next /////////////////////////////
@Test
public void testBranchingPatternSkipTillNext() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(40, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(41, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
    SubEvent middleEvent3 = new SubEvent(43, "foo3", 1.0, 10.0);
    SubEvent nextOne1 = new SubEvent(44, "next-one", 1.0, 2.0);
    SubEvent nextOne2 = new SubEvent(45, "next-one", 1.0, 2.0);
    Event endEvent = new Event(46, "end", 1.0);
    inputEvents.add(new StreamRecord<>(startEvent, 1));
    inputEvents.add(new StreamRecord<Event>(middleEvent1, 3));
    inputEvents.add(new StreamRecord<Event>(middleEvent2, 4));
    inputEvents.add(new StreamRecord<Event>(middleEvent3, 5));
    inputEvents.add(new StreamRecord<Event>(nextOne1, 6));
    inputEvents.add(new StreamRecord<Event>(nextOne2, 7));
    inputEvents.add(new StreamRecord<>(endEvent, 8));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle-first").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("middle-second").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().equals("next-one");
        }
    }).followedByAny("end").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    NFA<Event> nfa = compile(pattern, false);
    final List<List<Event>> patterns = feedNFA(inputEvents, nfa);
    comparePatterns(patterns, Lists.<List<Event>>newArrayList(
        Lists.newArrayList(startEvent, middleEvent1, nextOne1, endEvent)));
}
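Because followedBy applies relaxed ("skip till next") contiguity, the "middle-first" step pairs the start event only with the first qualifying SubEvent (middleEvent1), so a single match is expected. The compile(...), feedNFA(...) and comparePatterns(...) calls are NFAITCase test helpers that this excerpt does not include; compile presumably wraps the public NFACompiler, roughly as in the following sketch (helper name and exact signature are assumptions, not copied from the Flink test utilities):

// Hypothetical sketch of the compile(...) helper; the real helper may differ.
// Assumes org.apache.flink.cep.nfa.NFA and org.apache.flink.cep.nfa.compiler.NFACompiler.
private static <T> NFA<T> compile(Pattern<T, ?> pattern, boolean timeoutHandling) {
    NFACompiler.NFAFactory<T> nfaFactory = NFACompiler.compileFactory(pattern, timeoutHandling);
    return nfaFactory.createNFA();
}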
Use of org.apache.flink.cep.SubEvent in project flink by apache.
The class NFAITCase, method testBranchingPatternMixedFollowedBy.
@Test
public void testBranchingPatternMixedFollowedBy() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(40, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(41, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
    SubEvent middleEvent3 = new SubEvent(43, "foo3", 1.0, 10.0);
    SubEvent nextOne1 = new SubEvent(44, "next-one", 1.0, 2.0);
    SubEvent nextOne2 = new SubEvent(45, "next-one", 1.0, 2.0);
    Event endEvent = new Event(46, "end", 1.0);
    inputEvents.add(new StreamRecord<>(startEvent, 1));
    inputEvents.add(new StreamRecord<Event>(middleEvent1, 3));
    inputEvents.add(new StreamRecord<Event>(middleEvent2, 4));
    inputEvents.add(new StreamRecord<Event>(middleEvent3, 5));
    inputEvents.add(new StreamRecord<Event>(nextOne1, 6));
    inputEvents.add(new StreamRecord<Event>(nextOne2, 7));
    inputEvents.add(new StreamRecord<>(endEvent, 8));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedByAny("middle-first").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("middle-second").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().equals("next-one");
        }
    }).followedByAny("end").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    NFA<Event> nfa = compile(pattern, false);
    final List<List<Event>> patterns = feedNFA(inputEvents, nfa);
    comparePatterns(patterns, Lists.<List<Event>>newArrayList(
        Lists.newArrayList(startEvent, middleEvent1, nextOne1, endEvent),
        Lists.newArrayList(startEvent, middleEvent2, nextOne1, endEvent),
        Lists.newArrayList(startEvent, middleEvent3, nextOne1, endEvent)));
}
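The only change relative to testBranchingPatternSkipTillNext is the contiguity of the "middle-first" step, which is why this test expects three matches (one per middle SubEvent) instead of one. A side-by-side sketch of the two pattern skeletons, with the where(...) conditions elided for brevity (illustration only, not code from the Flink tests):

// Illustration only: the skeleton of the two branching patterns, conditions elided.
private static void contrastContiguityModes() {
    // followedBy = relaxed contiguity ("skip till next"): "middle-first" binds only the
    // first qualifying SubEvent, so testBranchingPatternSkipTillNext sees one match.
    Pattern<Event, ?> skipTillNext = Pattern.<Event>begin("start")
            .followedBy("middle-first").subtype(SubEvent.class)
            .followedBy("middle-second").subtype(SubEvent.class)
            .followedByAny("end");

    // followedByAny = non-deterministic relaxed contiguity ("skip till any"): every
    // qualifying SubEvent opens its own branch, so this test sees three matches.
    Pattern<Event, ?> skipTillAny = Pattern.<Event>begin("start")
            .followedByAny("middle-first").subtype(SubEvent.class)
            .followedBy("middle-second").subtype(SubEvent.class)
            .followedByAny("end");
}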
Use of org.apache.flink.cep.SubEvent in project flink by apache.
The class NFAITCase, method testSimplePatternNFA.
@Test
public void testSimplePatternNFA() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(41, "start", 1.0);
    SubEvent middleEvent = new SubEvent(42, "foo", 1.0, 10.0);
    Event endEvent = new Event(43, "end", 1.0);
    inputEvents.add(new StreamRecord<>(startEvent, 1));
    inputEvents.add(new StreamRecord<>(new Event(43, "foobar", 1.0), 2));
    inputEvents.add(new StreamRecord<Event>(new SubEvent(41, "barfoo", 1.0, 5.0), 3));
    inputEvents.add(new StreamRecord<Event>(middleEvent, 3));
    inputEvents.add(new StreamRecord<>(new Event(43, "start", 1.0), 4));
    inputEvents.add(new StreamRecord<>(endEvent, 5));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {
        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("end").where(new SimpleCondition<Event>() {
        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    NFA<Event> nfa = compile(pattern, false);
    List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);
    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(
        Lists.newArrayList(startEvent, middleEvent, endEvent)));
}
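Outside the NFA-level test harness, the same start -> middle (a SubEvent with volume > 5.0) -> end pattern would normally be applied to a DataStream through the CEP facade. A minimal sketch, assuming the Event/SubEvent types above and the classic select(...) API (newer Flink releases favor process(...) with a PatternProcessFunction); the input elements and the string result are made up for illustration:

// Minimal sketch, not taken from the Flink tests: apply `pattern` from the test
// above to a keyed stream via org.apache.flink.cep.CEP.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Event> events = env.fromElements(
        new Event(1, "start", 1.0),
        new SubEvent(1, "foo", 1.0, 10.0),
        new Event(1, "end", 1.0));

PatternStream<Event> patternStream = CEP.pattern(events.keyBy(Event::getId), pattern);

DataStream<String> matches = patternStream.select(
        new PatternSelectFunction<Event, String>() {
            @Override
            public String select(Map<String, List<Event>> match) throws Exception {
                // the map keys are the pattern step names: "start", "middle", "end"
                return match.get("start").get(0).getName() + " -> "
                        + match.get("end").get(0).getName();
            }
        });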
Use of org.apache.flink.cep.SubEvent in project flink by apache.
The class CEPRescalingTest, method testCEPFunctionScalingUp.
@Test
public void testCEPFunctionScalingUp() throws Exception {
    int maxParallelism = 10;
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {
        private static final long serialVersionUID = -4873366487571254798L;

        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // valid pattern events belong to different keygroups
    // that will be shipped to different tasks when changing parallelism.
    Event startEvent1 = new Event(7, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(7, "foo", 1.0, 10.0);
    Event endEvent1 = new Event(7, "end", 1.0);
    int keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent1), maxParallelism);
    assertEquals(1, keygroup);
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // this will go to task index 1
    Event startEvent2 = new Event(10, "start", 1.0);
    SubEvent middleEvent2 = new SubEvent(10, "foo", 1.0, 10.0);
    Event endEvent2 = new Event(10, "end", 1.0);
    keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent2), maxParallelism);
    assertEquals(9, keygroup);
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // now we start the test: we go from parallelism 1 to 2.
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = null;
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness1 = null;
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness2 = null;
    try {
        harness = getTestHarness(maxParallelism, 1, 0);
        harness.open();
        // valid element
        harness.processElement(new StreamRecord<>(startEvent1, 1));
        harness.processElement(new StreamRecord<>(new Event(7, "foobar", 1.0), 2));
        // valid element
        harness.processElement(new StreamRecord<>(startEvent2, 3));
        // valid element
        harness.processElement(new StreamRecord<Event>(middleEvent2, 4));
        // take a snapshot with some elements in the internal sorting queue
        OperatorSubtaskState snapshot = harness.snapshot(0, 0);
        harness.close();
        // initialize two sub-tasks with the previously snapshotted state to simulate scaling up:
        // we know that the valid element will go to index 0,
        // so we initialize the two tasks and we put the rest of
        // the valid elements for the pattern on task 0.
        OperatorSubtaskState initState1 = AbstractStreamOperatorTestHarness.repartitionOperatorState(snapshot, maxParallelism, 1, 2, 0);
        OperatorSubtaskState initState2 = AbstractStreamOperatorTestHarness.repartitionOperatorState(snapshot, maxParallelism, 1, 2, 1);
        harness1 = getTestHarness(maxParallelism, 2, 0);
        harness1.setup();
        harness1.initializeState(initState1);
        harness1.open();
        // if element timestamps are not correctly checkpointed/restored this will lead to
        // a pruning time underflow exception in NFA
        harness1.processWatermark(new Watermark(2));
        // valid element
        harness1.processElement(new StreamRecord<Event>(middleEvent1, 3));
        // valid element
        harness1.processElement(new StreamRecord<>(endEvent1, 5));
        harness1.processWatermark(new Watermark(Long.MAX_VALUE));
        // watermarks and the result
        assertEquals(3, harness1.getOutput().size());
        verifyWatermark(harness1.getOutput().poll(), 2);
        verifyPattern(harness1.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        harness2 = getTestHarness(maxParallelism, 2, 1);
        harness2.setup();
        harness2.initializeState(initState2);
        harness2.open();
        // now we move to the second parallel task
        harness2.processWatermark(new Watermark(2));
        harness2.processElement(new StreamRecord<>(endEvent2, 5));
        harness2.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4));
        harness2.processWatermark(new Watermark(Long.MAX_VALUE));
        assertEquals(3, harness2.getOutput().size());
        verifyWatermark(harness2.getOutput().poll(), 2);
        verifyPattern(harness2.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
    } finally {
        closeSilently(harness);
        closeSilently(harness1);
        closeSilently(harness2);
    }
}
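getTestHarness(maxParallelism, parallelism, subtaskIndex), verifyWatermark, verifyPattern, and closeSilently are private helpers of CEPRescalingTest that this excerpt does not include. getTestHarness presumably builds a keyed test harness around the CEP operator for one particular subtask of the given parallelism; a hedged sketch of what it might look like (the NFAFactory pattern factory, the keying by Event::getId, and the exact harness constructor used here are assumptions):

// Hypothetical sketch of the getTestHarness(...) helper; the real helper may differ.
private KeyedOneInputStreamOperatorTestHarness<Integer, Event, Map<String, List<Event>>> getTestHarness(
        int maxParallelism, int taskParallelism, int subtaskIdx) throws Exception {
    return new KeyedOneInputStreamOperatorTestHarness<>(
        // the factory supplying the start -> middle(SubEvent) -> end pattern is assumed here
        CepOperatorTestUtilities.getKeyedCepOperator(false, new NFAFactory()),
        (KeySelector<Event, Integer>) Event::getId,
        BasicTypeInfo.INT_TYPE_INFO,
        maxParallelism,
        taskParallelism,
        subtaskIdx);
}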