
Example 21 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

From the class CEPRescalingTest, method testCEPFunctionScalingDown.

@Test
public void testCEPFunctionScalingDown() throws Exception {
    int maxParallelism = 10;
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {

        private static final long serialVersionUID = -4873366487571254798L;

        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // create some valid pattern events on predetermined key groups and task indices
    // this will go to task index 0
    Event startEvent1 = new Event(7, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(7, "foo", 1.0, 10.0);
    Event endEvent1 = new Event(7, "end", 1.0);
    // verification of the key choice
    int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent1), maxParallelism);
    assertEquals(1, keyGroup);
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup));
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup));
    // this will go to task index 1
    Event startEvent2 = new Event(45, "start", 1.0);
    SubEvent middleEvent2 = new SubEvent(45, "foo", 1.0, 10.0);
    Event endEvent2 = new Event(45, "end", 1.0);
    keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent2), maxParallelism);
    assertEquals(6, keyGroup);
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup));
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup));
    // this will go to task index 0
    Event startEvent3 = new Event(90, "start", 1.0);
    SubEvent middleEvent3 = new SubEvent(90, "foo", 1.0, 10.0);
    Event endEvent3 = new Event(90, "end", 1.0);
    keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent3), maxParallelism);
    assertEquals(2, keyGroup);
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup));
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup));
    // this will go to task index 2
    Event startEvent4 = new Event(10, "start", 1.0);
    SubEvent middleEvent4 = new SubEvent(10, "foo", 1.0, 10.0);
    Event endEvent4 = new Event(10, "end", 1.0);
    keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent4), maxParallelism);
    assertEquals(9, keyGroup);
    assertEquals(2, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup));
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup));
    // starting the test: we scale down from parallelism 3 to parallelism 2
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness1 = getTestHarness(maxParallelism, 3, 0);
    harness1.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness2 = getTestHarness(maxParallelism, 3, 1);
    harness2.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness3 = getTestHarness(maxParallelism, 3, 2);
    harness3.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness4 = null;
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness5 = null;
    try {
        harness1.processWatermark(Long.MIN_VALUE);
        harness2.processWatermark(Long.MIN_VALUE);
        harness3.processWatermark(Long.MIN_VALUE);
        // valid element
        harness1.processElement(new StreamRecord<>(startEvent1, 1));
        harness1.processElement(new StreamRecord<>(new Event(7, "foobar", 1.0), 2));
        // valid element
        harness1.processElement(new StreamRecord<Event>(middleEvent1, 3));
        // valid element
        harness1.processElement(new StreamRecord<>(endEvent1, 5));
        // up to this point we have a valid sequence, so after creating the
        // new instance and sending it a watermark, we expect it to fire,
        // even with no new elements.
        harness1.processElement(new StreamRecord<>(startEvent3, 10));
        harness1.processElement(new StreamRecord<>(startEvent1, 10));
        harness2.processElement(new StreamRecord<>(startEvent2, 7));
        harness2.processElement(new StreamRecord<Event>(middleEvent2, 8));
        harness3.processElement(new StreamRecord<>(startEvent4, 15));
        harness3.processElement(new StreamRecord<Event>(middleEvent4, 16));
        harness3.processElement(new StreamRecord<>(endEvent4, 17));
        // so far we only have the initial watermark
        assertEquals(1, harness1.getOutput().size());
        verifyWatermark(harness1.getOutput().poll(), Long.MIN_VALUE);
        assertEquals(1, harness2.getOutput().size());
        verifyWatermark(harness2.getOutput().poll(), Long.MIN_VALUE);
        assertEquals(1, harness3.getOutput().size());
        verifyWatermark(harness3.getOutput().poll(), Long.MIN_VALUE);
        // we take a snapshot and repackage it to look like the snapshot of a
        // single operator; this will be the initial state of all downstream tasks.
        OperatorSubtaskState snapshot = AbstractStreamOperatorTestHarness.repackageState(harness2.snapshot(0, 0), harness1.snapshot(0, 0), harness3.snapshot(0, 0));
        OperatorSubtaskState initState1 = AbstractStreamOperatorTestHarness.repartitionOperatorState(snapshot, maxParallelism, 3, 2, 0);
        OperatorSubtaskState initState2 = AbstractStreamOperatorTestHarness.repartitionOperatorState(snapshot, maxParallelism, 3, 2, 1);
        harness4 = getTestHarness(maxParallelism, 2, 0);
        harness4.setup();
        harness4.initializeState(initState1);
        harness4.open();
        harness5 = getTestHarness(maxParallelism, 2, 1);
        harness5.setup();
        harness5.initializeState(initState2);
        harness5.open();
        harness5.processElement(new StreamRecord<>(endEvent2, 11));
        harness5.processWatermark(new Watermark(12));
        verifyPattern(harness5.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
        verifyWatermark(harness5.getOutput().poll(), 12);
        // if element timestamps are not correctly checkpointed/restored this will lead to
        // a pruning time underflow exception in NFA
        harness4.processWatermark(new Watermark(12));
        assertEquals(2, harness4.getOutput().size());
        verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        verifyWatermark(harness4.getOutput().poll(), 12);
        // valid element
        harness4.processElement(new StreamRecord<Event>(middleEvent3, 15));
        // valid element
        harness4.processElement(new StreamRecord<>(endEvent3, 16));
        // valid element
        harness4.processElement(new StreamRecord<Event>(middleEvent1, 15));
        // valid element
        harness4.processElement(new StreamRecord<>(endEvent1, 16));
        harness4.processWatermark(new Watermark(Long.MAX_VALUE));
        harness5.processWatermark(new Watermark(Long.MAX_VALUE));
        // verify result
        assertEquals(3, harness4.getOutput().size());
        // check the order of the events in the output
        Queue<Object> output = harness4.getOutput();
        StreamRecord<?> resultRecord = (StreamRecord<?>) output.peek();
        assertTrue(resultRecord.getValue() instanceof Map);
        @SuppressWarnings("unchecked") Map<String, List<Event>> patternMap = (Map<String, List<Event>>) resultRecord.getValue();
        if (patternMap.get("start").get(0).getId() == 7) {
            verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
            verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
        } else {
            verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
            verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        }
        // after scaling down, this pattern should end up on this subtask
        assertEquals(2, harness5.getOutput().size());
        verifyPattern(harness5.getOutput().poll(), startEvent4, middleEvent4, endEvent4);
    } finally {
        closeSilently(harness1);
        closeSilently(harness2);
        closeSilently(harness3);
        closeSilently(harness4);
        closeSilently(harness5);
    }
}
Also used: SubEvent (org.apache.flink.cep.SubEvent), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), KeySelector (org.apache.flink.api.java.functions.KeySelector), OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState), Event (org.apache.flink.cep.Event), List (java.util.List), Map (java.util.Map), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
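
The key-group assertions in this test follow from Flink's deterministic key routing: a key is assigned to key group murmurHash(key.hashCode()) % maxParallelism, and a key group is owned by subtask keyGroup * parallelism / maxParallelism. Below is a minimal standalone sketch of that computation; the class name and main method are illustrative and not part of the test.

import org.apache.flink.runtime.state.KeyGroupRangeAssignment;

public class KeyGroupRoutingSketch {

    public static void main(String[] args) {
        int maxParallelism = 10;
        // the four keys used by the test above
        for (int key : new int[] { 7, 45, 90, 10 }) {
            int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            // which subtask owns this key group at parallelism 3 and at parallelism 2
            int subtaskAt3 = KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup);
            int subtaskAt2 = KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup);
            System.out.printf("key=%d keyGroup=%d subtask@p3=%d subtask@p2=%d%n", key, keyGroup, subtaskAt3, subtaskAt2);
        }
    }
}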

Example 22 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

From the class NFAStateAccessTest, method testIterativeWithABACPattern.

@Test
public void testIterativeWithABACPattern() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    final Event startEvent1 = new Event(40, "start", 1.0);
    final Event startEvent2 = new Event(40, "start", 2.0);
    final Event startEvent3 = new Event(40, "start", 3.0);
    final Event startEvent4 = new Event(40, "start", 4.0);
    final SubEvent middleEvent1 = new SubEvent(41, "foo1", 1.0, 10);
    final SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10);
    final SubEvent middleEvent3 = new SubEvent(43, "foo3", 3.0, 10);
    final SubEvent middleEvent4 = new SubEvent(43, "foo4", 1.0, 10);
    final Event endEvent = new Event(46, "end", 1.0);
    // the numeric comments give each event's price, which drives the
    // iterative "middle2" condition defined below
    inputEvents.add(new StreamRecord<>(startEvent1, 1L)); // price 1.0
    inputEvents.add(new StreamRecord<Event>(middleEvent1, 2L)); // price 1.0
    inputEvents.add(new StreamRecord<>(startEvent2, 2L)); // price 2.0
    inputEvents.add(new StreamRecord<>(startEvent3, 2L)); // price 3.0
    inputEvents.add(new StreamRecord<Event>(middleEvent2, 2L)); // price 2.0
    inputEvents.add(new StreamRecord<>(startEvent4, 2L)); // price 4.0
    inputEvents.add(new StreamRecord<Event>(middleEvent3, 2L)); // price 3.0
    inputEvents.add(new StreamRecord<Event>(middleEvent4, 2L)); // price 1.0
    inputEvents.add(new StreamRecord<>(endEvent, 4L));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedByAny("middle1").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {

        private static final long serialVersionUID = 2178338526904474690L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().startsWith("foo");
        }
    }).followedBy("middle2").where(new IterativeCondition<Event>() {

        private static final long serialVersionUID = -1223388426808292695L;

        @Override
        public boolean filter(Event value, Context<Event> ctx) throws Exception {
            if (!value.getName().equals("start")) {
                return false;
            }
            double sum = 0.0;
            for (Event e : ctx.getEventsForPattern("middle2")) {
                sum += e.getPrice();
            }
            sum += value.getPrice();
            return Double.compare(sum, 5.0) <= 0;
        }
    }).oneOrMore().followedBy("end").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 562590474115118323L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    TestSharedBuffer<Event> sharedBuffer = TestSharedBuffer.createTestBuffer(Event.createTypeSerializer());
    NFATestHarness nfaTestHarness = NFATestHarness.forPattern(pattern).withSharedBuffer(sharedBuffer).build();
    nfaTestHarness.consumeRecords(inputEvents);
    assertEquals(90, sharedBuffer.getStateReads());
    assertEquals(31, sharedBuffer.getStateWrites());
    assertEquals(121, sharedBuffer.getStateAccesses());
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), SubEvent (org.apache.flink.cep.SubEvent), SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition), ArrayList (java.util.ArrayList), IterativeCondition (org.apache.flink.cep.pattern.conditions.IterativeCondition), NFATestHarness (org.apache.flink.cep.utils.NFATestHarness), Event (org.apache.flink.cep.Event), Test (org.junit.Test)
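
The interesting part of this pattern is the iterative condition on "middle2": it re-reads every event already matched for that pattern via ctx.getEventsForPattern, which is what drives the state-read counts asserted at the end. Below is a hedged sketch of that condition's logic in isolation; the helper name is hypothetical and not part of the test.

import org.apache.flink.cep.Event;

// Hypothetical helper extracting the "middle2" logic for readability: a
// candidate "start" event is accepted only while the price sum over the
// events already matched for "middle2", plus the candidate itself, stays
// at or below 5.0.
static boolean acceptsForMiddle2(Iterable<Event> matchedSoFar, Event candidate) {
    if (!candidate.getName().equals("start")) {
        return false;
    }
    double sum = candidate.getPrice();
    for (Event e : matchedSoFar) {
        sum += e.getPrice();
    }
    return Double.compare(sum, 5.0) <= 0;
}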

Example 23 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

From the class CEPOperatorTest, method testCEPOperatorCleanupProcessingTime.

@Test
public void testCEPOperatorCleanupProcessingTime() throws Exception {
    Event startEvent1 = new Event(42, "start", 1.0);
    Event startEvent2 = new Event(42, "start", 2.0);
    SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
    SubEvent middleEvent3 = new SubEvent(42, "foo3", 1.0, 10.0);
    Event endEvent1 = new Event(42, "end", 1.0);
    Event endEvent2 = new Event(42, "end", 2.0);
    Event startEventK2 = new Event(43, "start", 1.0);
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = getKeyedCepOperator(true);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.open();
        harness.setProcessingTime(0L);
        harness.processElement(new StreamRecord<>(startEvent1, 1L));
        harness.processElement(new StreamRecord<>(startEventK2, 1L));
        harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 2L));
        harness.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3L));
        assertFalse(operator.hasNonEmptyPQ(42));
        assertFalse(operator.hasNonEmptyPQ(43));
        assertTrue(operator.hasNonEmptySharedBuffer(42));
        assertTrue(operator.hasNonEmptySharedBuffer(43));
        harness.setProcessingTime(3L);
        harness.processElement(new StreamRecord<>(startEvent2, 3L));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 4L));
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();
        CepOperator<Event, Integer, Map<String, List<Event>>> operator2 = getKeyedCepOperator(true);
        harness = CepOperatorTestUtilities.getCepTestHarness(operator2);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.setProcessingTime(3L);
        harness.processElement(new StreamRecord<>(endEvent1, 5L));
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent2, endEvent1);
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent1);
        harness.setProcessingTime(11L);
        harness.processElement(new StreamRecord<Event>(middleEvent3, 11L));
        harness.processElement(new StreamRecord<>(endEvent2, 12L));
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent3, endEvent2);
        harness.setProcessingTime(21L);
        assertTrue(operator2.hasNonEmptySharedBuffer(42));
        harness.processElement(new StreamRecord<>(startEvent1, 21L));
        assertTrue(operator2.hasNonEmptySharedBuffer(42));
        harness.setProcessingTime(49L);
        // TODO: 3/13/17 another event is needed here in order to trigger the cleanup
        harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
        // the pattern expired
        assertFalse(operator2.hasNonEmptySharedBuffer(42));
        assertEquals(0L, harness.numEventTimeTimers());
        assertFalse(operator2.hasNonEmptyPQ(42));
        assertFalse(operator2.hasNonEmptyPQ(43));
    } finally {
        harness.close();
    }
}
Also used: SubEvent (org.apache.flink.cep.SubEvent), Event (org.apache.flink.cep.Event), Map (java.util.Map), HashMap (java.util.HashMap), OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState), Test (org.junit.Test)
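
This test exercises processing-time cleanup: once processing time advances past a match's time window, partial matches are pruned and the per-key shared buffer empties again, which is what the final assertions check. The window itself comes from the pattern built inside getKeyedCepOperator, which is not shown in this excerpt; the sketch below only illustrates, under that assumption, how a within() clause puts a time bound on a pattern.

import org.apache.flink.cep.Event;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.windowing.time.Time;

// Assumed shape of a time-bounded pattern; the actual window length used by
// getKeyedCepOperator is not visible in this excerpt.
public static Pattern<Event, ?> windowedPattern() {
    return Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("end").where(new SimpleCondition<Event>() {

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(10));
}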

Example 24 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

From the class CEPOperatorTest, method testKeyedCEPOperatorCheckpointingWithRocksDB.

@Test
public void testKeyedCEPOperatorCheckpointingWithRocksDB() throws Exception {
    String rocksDbPath = tempFolder.newFolder().getAbsolutePath();
    RocksDBStateBackend rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend(), TernaryBoolean.FALSE);
    rocksDBStateBackend.setDbStoragePath(rocksDbPath);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = getCepTestHarness(false);
    try {
        harness.setStateBackend(rocksDBStateBackend);
        harness.open();
        Event startEvent = new Event(42, "start", 1.0);
        SubEvent middleEvent = new SubEvent(42, "foo", 1.0, 10.0);
        Event endEvent = new Event(42, "end", 1.0);
        harness.processElement(new StreamRecord<>(startEvent, 1L));
        harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
        // simulate snapshot/restore with some elements in internal sorting queue
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();
        harness = getCepTestHarness(false);
        rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
        rocksDBStateBackend.setDbStoragePath(rocksDbPath);
        harness.setStateBackend(rocksDBStateBackend);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processWatermark(new Watermark(Long.MIN_VALUE));
        harness.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3L));
        // if element timestamps are not correctly checkpointed/restored this will lead to
        // a pruning time underflow exception in NFA
        harness.processWatermark(new Watermark(2L));
        // simulate snapshot/restore with empty element queue but NFA state
        OperatorSubtaskState snapshot2 = harness.snapshot(1L, 1L);
        harness.close();
        harness = getCepTestHarness(false);
        rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
        rocksDBStateBackend.setDbStoragePath(rocksDbPath);
        harness.setStateBackend(rocksDBStateBackend);
        harness.setup();
        harness.initializeState(snapshot2);
        harness.open();
        harness.processElement(new StreamRecord<Event>(middleEvent, 3L));
        harness.processElement(new StreamRecord<>(new Event(42, "start", 1.0), 4L));
        harness.processElement(new StreamRecord<>(endEvent, 5L));
        harness.processWatermark(new Watermark(Long.MAX_VALUE));
        // get and verify the output
        Queue<Object> result = harness.getOutput();
        assertEquals(2, result.size());
        verifyPattern(result.poll(), startEvent, middleEvent, endEvent);
        verifyWatermark(result.poll(), Long.MAX_VALUE);
    } finally {
        harness.close();
    }
}
Also used: RocksDBStateBackend (org.apache.flink.contrib.streaming.state.RocksDBStateBackend), SubEvent (org.apache.flink.cep.SubEvent), MemoryStateBackend (org.apache.flink.runtime.state.memory.MemoryStateBackend), OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState), Event (org.apache.flink.cep.Event), Map (java.util.Map), HashMap (java.util.HashMap), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
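
The harness wires the backend in directly via setStateBackend; in a regular job the same RocksDB backend would be installed on the execution environment. A brief sketch of that setup follows, with a placeholder checkpoint URI that is not taken from the test.

import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class RocksDbBackendSketch {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // "file:///tmp/flink-checkpoints" is a placeholder, not a value from
        // the test; the boolean flag disables incremental checkpoints
        env.setStateBackend(new RocksDBStateBackend("file:///tmp/flink-checkpoints", false));
    }
}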

Example 25 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

From the class CEPOperatorTest, method testKeyedCEPOperatorNFAUpdateTimes.

@Test
public void testKeyedCEPOperatorNFAUpdateTimes() throws Exception {
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOperator(true, new SimpleNFAFactory());
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.open();
        final ValueState nfaOperatorState = (ValueState) Whitebox.<ValueState>getInternalState(operator, "computationStates");
        final ValueState nfaOperatorStateSpy = Mockito.spy(nfaOperatorState);
        Whitebox.setInternalState(operator, "computationStates", nfaOperatorStateSpy);
        Event startEvent = new Event(42, "c", 1.0);
        SubEvent middleEvent = new SubEvent(42, "a", 1.0, 10.0);
        Event endEvent = new Event(42, "b", 1.0);
        harness.processElement(new StreamRecord<>(startEvent, 1L));
        harness.processElement(new StreamRecord<>(new Event(42, "d", 1.0), 4L));
        harness.processElement(new StreamRecord<Event>(middleEvent, 4L));
        harness.processElement(new StreamRecord<>(endEvent, 4L));
        // verify the number of times the NFA state is updated
        Mockito.verify(nfaOperatorStateSpy, Mockito.times(3)).update(Mockito.any());
        // get and verify the output
        Queue<Object> result = harness.getOutput();
        assertEquals(1, result.size());
        verifyPattern(result.poll(), startEvent, middleEvent, endEvent);
    } finally {
        harness.close();
    }
}
Also used: ValueState (org.apache.flink.api.common.state.ValueState), SubEvent (org.apache.flink.cep.SubEvent), Event (org.apache.flink.cep.Event), Map (java.util.Map), HashMap (java.util.HashMap), Test (org.junit.Test)
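
The update-count verification above relies on a plain Mockito spy wrapped around the operator's internal ValueState. The same technique on an everyday object, as a minimal sketch unrelated to Flink internals:

import java.util.ArrayList;
import java.util.List;
import org.mockito.Mockito;

public class SpyCountingSketch {

    public static void main(String[] args) {
        // a spy delegates to the real object but records calls for verification
        List<String> spied = Mockito.spy(new ArrayList<String>());
        spied.add("a");
        spied.add("b");
        Mockito.verify(spied, Mockito.times(2)).add(Mockito.anyString());
    }
}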

Aggregations

Event (org.apache.flink.cep.Event): 27
SubEvent (org.apache.flink.cep.SubEvent): 27
Test (org.junit.Test): 27
Map (java.util.Map): 21
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 15
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 14
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 13
HashMap (java.util.HashMap): 11
List (java.util.List): 10
KeySelector (org.apache.flink.api.java.functions.KeySelector): 9
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 8
ArrayList (java.util.ArrayList): 7
SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition): 7
RocksDBStateBackend (org.apache.flink.contrib.streaming.state.RocksDBStateBackend): 4
MemoryStateBackend (org.apache.flink.runtime.state.memory.MemoryStateBackend): 4
Ignore (org.junit.Ignore): 3
ValueState (org.apache.flink.api.common.state.ValueState): 2
NullByteKeySelector (org.apache.flink.api.java.functions.NullByteKeySelector): 2
IterativeCondition (org.apache.flink.cep.pattern.conditions.IterativeCondition): 2
NFA (org.apache.flink.cep.nfa.NFA): 1