Example 6 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.
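For context, SubEvent is a small test POJO in flink-cep that extends Event with an extra volume field. A minimal sketch of its assumed shape (the real class lives in org.apache.flink.cep; treat the exact field layout as an assumption):

// Sketch of the test POJO used by these examples (assumed shape, not the verbatim Flink source).
public class SubEvent extends Event {

    private final double volume;

    public SubEvent(int id, String name, double price, double volume) {
        super(id, name, price);
        this.volume = volume;
    }

    public double getVolume() {
        return volume;
    }
}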

The class CEPOperatorTest, method testCEPOperatorCleanupEventTime:

@Test
public void testCEPOperatorCleanupEventTime() throws Exception {
    Event startEvent1 = new Event(42, "start", 1.0);
    Event startEvent2 = new Event(42, "start", 2.0);
    SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
    SubEvent middleEvent3 = new SubEvent(42, "foo3", 1.0, 10.0);
    Event endEvent1 = new Event(42, "end", 1.0);
    Event endEvent2 = new Event(42, "end", 2.0);
    Event startEventK2 = new Event(43, "start", 1.0);
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = getKeyedCepOperator(false);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.open();
        harness.processWatermark(new Watermark(Long.MIN_VALUE));
        harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 2L));
        harness.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3L));
        harness.processElement(new StreamRecord<>(startEvent1, 1L));
        harness.processElement(new StreamRecord<>(startEventK2, 1L));
        // two keys (42 and 43) must be registered for the watermark callback;
        // all seen elements must still sit in the priority queues, with no NFA state yet
        assertEquals(2L, harness.numEventTimeTimers());
        assertEquals(4L, operator.getPQSize(42));
        assertEquals(1L, operator.getPQSize(43));
        assertFalse(operator.hasNonEmptySharedBuffer(42));
        assertFalse(operator.hasNonEmptySharedBuffer(43));
        harness.processWatermark(new Watermark(2L));
        verifyWatermark(harness.getOutput().poll(), Long.MIN_VALUE);
        verifyWatermark(harness.getOutput().poll(), 2L);
        // still the same 2 keys
        // one element remains in the PQ for key 42 (the barfoo at timestamp 3, still ahead of the watermark)
        // for key 43 the element entered the NFA, so its PQ is empty
        assertEquals(2L, harness.numEventTimeTimers());
        assertTrue(operator.hasNonEmptySharedBuffer(42));
        assertEquals(1L, operator.getPQSize(42));
        assertTrue(operator.hasNonEmptySharedBuffer(43));
        assertFalse(operator.hasNonEmptyPQ(43));
        harness.processElement(new StreamRecord<>(startEvent2, 4L));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 5L));
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();
        CepOperator<Event, Integer, Map<String, List<Event>>> operator2 = getKeyedCepOperator(false);
        harness = CepOperatorTestUtilities.getCepTestHarness(operator2);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(endEvent1, 6L));
        harness.processWatermark(11L);
        harness.processWatermark(12L);
        // now we have 1 key, because key 43 expired and was removed;
        // key 42 is still there due to startEvent2
        assertEquals(1L, harness.numEventTimeTimers());
        assertTrue(operator2.hasNonEmptySharedBuffer(42));
        assertFalse(operator2.hasNonEmptyPQ(42));
        assertFalse(operator2.hasNonEmptySharedBuffer(43));
        assertFalse(operator2.hasNonEmptyPQ(43));
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent2, endEvent1);
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent1);
        verifyWatermark(harness.getOutput().poll(), 11L);
        verifyWatermark(harness.getOutput().poll(), 12L);
        // this is a late event, because its timestamp (12) is not greater than the last watermark (12)
        harness.processElement(new StreamRecord<Event>(middleEvent3, 12L));
        harness.processElement(new StreamRecord<>(endEvent2, 13L));
        harness.processWatermark(20L);
        harness.processWatermark(21L);
        assertFalse(operator2.hasNonEmptySharedBuffer(42));
        assertFalse(operator2.hasNonEmptyPQ(42));
        assertEquals(0L, harness.numEventTimeTimers());
        assertEquals(3, harness.getOutput().size());
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
        verifyWatermark(harness.getOutput().poll(), 20L);
        verifyWatermark(harness.getOutput().poll(), 21L);
    } finally {
        harness.close();
    }
}
Also used: SubEvent(org.apache.flink.cep.SubEvent) Event(org.apache.flink.cep.Event) Map(java.util.Map) HashMap(java.util.HashMap) Watermark(org.apache.flink.streaming.api.watermark.Watermark) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)
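The keyed operator returned by getKeyedCepOperator compiles a start/middle/end pattern whose middle stage is restricted to SubEvent, which is why verifyPattern expects (start, SubEvent, end) triples. A rough sketch of conditions consistent with the assertions above; the exact conditions live in the test's NFA factory, so treat them as an assumption:

// Assumed pattern shape: "start" -> SubEvent "middle" -> "end" (illustrative only).
Pattern<Event, ?> pattern = Pattern.<Event>begin("start")
    .where(new SimpleCondition<Event>() {
        @Override
        public boolean filter(Event value) {
            return value.getName().equals("start");
        }
    })
    .followedByAny("middle")
    .subtype(SubEvent.class)
    .where(new SimpleCondition<SubEvent>() {
        @Override
        public boolean filter(SubEvent value) {
            // assumption: only high-volume sub-events qualify, which would explain why the
            // volume-5.0 "barfoo" never reaches the shared buffer while the volume-10.0 events do
            return value.getVolume() > 5.0;
        }
    })
    .followedByAny("end")
    .where(new SimpleCondition<Event>() {
        @Override
        public boolean filter(Event value) {
            return value.getName().equals("end");
        }
    });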

Example 7 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

The class CEPOperatorTest, method testKeyedCEPOperatorNFAUpdateWithRocksDB:

@Test
public void testKeyedCEPOperatorNFAUpdateWithRocksDB() throws Exception {
    String rocksDbPath = tempFolder.newFolder().getAbsolutePath();
    RocksDBStateBackend rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend(), TernaryBoolean.FALSE);
    rocksDBStateBackend.setDbStoragePath(rocksDbPath);
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOperator(true, new SimpleNFAFactory());
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.setStateBackend(rocksDBStateBackend);
        harness.open();
        Event startEvent = new Event(42, "c", 1.0);
        SubEvent middleEvent = new SubEvent(42, "a", 1.0, 10.0);
        Event endEvent = new Event(42, "b", 1.0);
        harness.processElement(new StreamRecord<>(startEvent, 1L));
        // simulate snapshot/restore with some elements still in the internal sorting queue
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();
        operator = CepOperatorTestUtilities.getKeyedCepOperator(true, new SimpleNFAFactory());
        harness = CepOperatorTestUtilities.getCepTestHarness(operator);
        rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
        rocksDBStateBackend.setDbStoragePath(rocksDbPath);
        harness.setStateBackend(rocksDBStateBackend);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(new Event(42, "d", 1.0), 4L));
        OperatorSubtaskState snapshot2 = harness.snapshot(0L, 0L);
        harness.close();
        operator = CepOperatorTestUtilities.getKeyedCepOperator(true, new SimpleNFAFactory());
        harness = CepOperatorTestUtilities.getCepTestHarness(operator);
        rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
        rocksDBStateBackend.setDbStoragePath(rocksDbPath);
        harness.setStateBackend(rocksDBStateBackend);
        harness.setup();
        harness.initializeState(snapshot2);
        harness.open();
        harness.processElement(new StreamRecord<Event>(middleEvent, 4L));
        harness.processElement(new StreamRecord<>(endEvent, 4L));
        // get and verify the output
        Queue<Object> result = harness.getOutput();
        assertEquals(1, result.size());
        verifyPattern(result.poll(), startEvent, middleEvent, endEvent);
    } finally {
        harness.close();
    }
}
Also used: RocksDBStateBackend(org.apache.flink.contrib.streaming.state.RocksDBStateBackend) SubEvent(org.apache.flink.cep.SubEvent) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Event(org.apache.flink.cep.Event) Map(java.util.Map) HashMap(java.util.HashMap) Test(org.junit.Test)
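Outside a test harness, the same backend wiring would normally go on the execution environment rather than on the harness. A minimal sketch using the legacy RocksDBStateBackend API from the example above (the storage path is a placeholder):

// Configure RocksDB as the state backend for a streaming job (sketch; the path is a placeholder).
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
RocksDBStateBackend backend = new RocksDBStateBackend(new MemoryStateBackend(), TernaryBoolean.FALSE);
backend.setDbStoragePath("/tmp/flink-rocksdb");
env.setStateBackend(backend);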

Example 8 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

The class CEPOperatorTest, method testCEPOperatorComparatorEventTime:

@Test
public void testCEPOperatorComparatorEventTime() throws Exception {
    Event startEvent1 = new Event(42, "start", 1.0);
    Event startEvent2 = new Event(42, "start", 2.0);
    SubEvent middleEvent1 = new SubEvent(42, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 2.0, 10.0);
    Event endEvent = new Event(42, "end", 1.0);
    Event startEventK2 = new Event(43, "start", 1.0);
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = getKeyedCepOperatorWithComparator(false);
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.open();
        harness.processWatermark(0L);
        harness.processElement(new StreamRecord<>(startEvent1, 1L));
        harness.processElement(new StreamRecord<>(startEventK2, 1L));
        harness.processElement(new StreamRecord<>(new Event(42, "foobar", 1.0), 2L));
        harness.processElement(new StreamRecord<Event>(new SubEvent(42, "barfoo", 1.0, 5.0), 3L));
        assertTrue(operator.hasNonEmptyPQ(42));
        assertTrue(operator.hasNonEmptyPQ(43));
        assertFalse(operator.hasNonEmptySharedBuffer(42));
        assertFalse(operator.hasNonEmptySharedBuffer(43));
        harness.processWatermark(3L);
        assertFalse(operator.hasNonEmptyPQ(42));
        assertFalse(operator.hasNonEmptyPQ(43));
        assertTrue(operator.hasNonEmptySharedBuffer(42));
        assertTrue(operator.hasNonEmptySharedBuffer(43));
        harness.processElement(new StreamRecord<>(startEvent2, 4L));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 5L));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 5L));
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();
        CepOperator<Event, Integer, Map<String, List<Event>>> operator2 = getKeyedCepOperatorWithComparator(false);
        harness = CepOperatorTestUtilities.getCepTestHarness(operator2);
        harness.setup();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(endEvent, 6L));
        harness.processWatermark(6L);
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent1, endEvent);
        verifyPattern(harness.getOutput().poll(), startEvent1, middleEvent2, endEvent);
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent1, endEvent);
        verifyPattern(harness.getOutput().poll(), startEvent2, middleEvent2, endEvent);
        verifyWatermark(harness.getOutput().poll(), 6L);
    } finally {
        harness.close();
    }
}
Also used: SubEvent(org.apache.flink.cep.SubEvent) Event(org.apache.flink.cep.Event) Map(java.util.Map) HashMap(java.util.HashMap) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) Test(org.junit.Test)
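The comparator variant of the operator (getKeyedCepOperatorWithComparator) sorts elements that carry the same timestamp before they enter the NFA, which is why middleEvent1 and middleEvent2, both at timestamp 5, come out in a deterministic order. A sketch of a secondary ordering consistent with the output above, assuming ordering by price (the actual comparator lives in the test utilities):

// Hypothetical comparator for illustration: same-timestamp events ordered by ascending price.
EventComparator<Event> byPrice = new EventComparator<Event>() {
    private static final long serialVersionUID = 1L;

    @Override
    public int compare(Event o1, Event o2) {
        return Double.compare(o1.getPrice(), o2.getPrice());
    }
};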

Example 9 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

The class CEPOperatorTest, method testKeyedCEPOperatorNFAUpdateTimesWithRocksDB:

@Test
public void testKeyedCEPOperatorNFAUpdateTimesWithRocksDB() throws Exception {
    String rocksDbPath = tempFolder.newFolder().getAbsolutePath();
    RocksDBStateBackend rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
    rocksDBStateBackend.setDbStoragePath(rocksDbPath);
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOperator(true, new SimpleNFAFactory());
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.setStateBackend(rocksDBStateBackend);
        harness.open();
        final ValueState nfaOperatorState = (ValueState) Whitebox.<ValueState>getInternalState(operator, "computationStates");
        final ValueState nfaOperatorStateSpy = Mockito.spy(nfaOperatorState);
        Whitebox.setInternalState(operator, "computationStates", nfaOperatorStateSpy);
        Event startEvent = new Event(42, "c", 1.0);
        SubEvent middleEvent = new SubEvent(42, "a", 1.0, 10.0);
        Event endEvent = new Event(42, "b", 1.0);
        harness.processElement(new StreamRecord<>(startEvent, 1L));
        harness.processElement(new StreamRecord<>(new Event(42, "d", 1.0), 4L));
        harness.processElement(new StreamRecord<Event>(middleEvent, 4L));
        harness.processElement(new StreamRecord<>(endEvent, 4L));
        // verify the number of times the NFA state was updated
        Mockito.verify(nfaOperatorStateSpy, Mockito.times(3)).update(Mockito.any());
        // get and verify the output
        Queue<Object> result = harness.getOutput();
        assertEquals(1, result.size());
        verifyPattern(result.poll(), startEvent, middleEvent, endEvent);
    } finally {
        harness.close();
    }
}
Also used: RocksDBStateBackend(org.apache.flink.contrib.streaming.state.RocksDBStateBackend) SubEvent(org.apache.flink.cep.SubEvent) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) ValueState(org.apache.flink.api.common.state.ValueState) Event(org.apache.flink.cep.Event) Map(java.util.Map) HashMap(java.util.HashMap) Test(org.junit.Test)

Example 10 with SubEvent

Use of org.apache.flink.cep.SubEvent in project flink by apache.

The class CEPOperatorTest, method testCEPOperatorSerializationWRocksDB:

@Test
public void testCEPOperatorSerializationWRocksDB() throws Exception {
    String rocksDbPath = tempFolder.newFolder().getAbsolutePath();
    RocksDBStateBackend rocksDBStateBackend = new RocksDBStateBackend(new MemoryStateBackend());
    rocksDBStateBackend.setDbStoragePath(rocksDbPath);
    final Event startEvent1 = new Event(40, "start", 1.0);
    final Event startEvent2 = new Event(40, "start", 2.0);
    final SubEvent middleEvent1 = new SubEvent(40, "foo1", 1.0, 10);
    final SubEvent middleEvent2 = new SubEvent(40, "foo2", 2.0, 10);
    final SubEvent middleEvent3 = new SubEvent(40, "foo3", 3.0, 10);
    final SubEvent middleEvent4 = new SubEvent(40, "foo4", 1.0, 10);
    final Event nextOne = new Event(40, "next-one", 1.0);
    final Event endEvent = new Event(40, "end", 1.0);
    final Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").subtype(SubEvent.class).where(new IterativeCondition<SubEvent>() {

        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value, Context<SubEvent> ctx) throws Exception {
            if (!value.getName().startsWith("foo")) {
                return false;
            }
            double sum = 0.0;
            for (Event event : ctx.getEventsForPattern("middle")) {
                sum += event.getPrice();
            }
            sum += value.getPrice();
            return Double.compare(sum, 5.0) < 0;
        }
    }).oneOrMore().allowCombinations().followedBy("end").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    CepOperator<Event, Integer, Map<String, List<Event>>> operator = CepOperatorTestUtilities.getKeyedCepOperator(false, new NFACompiler.NFAFactory<Event>() {

        private static final long serialVersionUID = 477082663248051994L;

        @Override
        public NFA<Event> createNFA() {
            return NFACompiler.compileFactory(pattern, false).createNFA();
        }
    });
    OneInputStreamOperatorTestHarness<Event, Map<String, List<Event>>> harness = CepOperatorTestUtilities.getCepTestHarness(operator);
    try {
        harness.setStateBackend(rocksDBStateBackend);
        harness.open();
        harness.processWatermark(0L);
        harness.processElement(new StreamRecord<>(startEvent1, 1));
        harness.processElement(new StreamRecord<Event>(middleEvent1, 2));
        harness.processWatermark(2L);
        harness.processElement(new StreamRecord<Event>(middleEvent3, 5));
        harness.processElement(new StreamRecord<Event>(middleEvent2, 3));
        harness.processElement(new StreamRecord<>(startEvent2, 4));
        harness.processWatermark(5L);
        harness.processElement(new StreamRecord<>(nextOne, 7));
        harness.processElement(new StreamRecord<>(endEvent, 8));
        harness.processElement(new StreamRecord<Event>(middleEvent4, 6));
        harness.processWatermark(100L);
        List<List<Event>> resultingPatterns = new ArrayList<>();
        while (!harness.getOutput().isEmpty()) {
            Object o = harness.getOutput().poll();
            if (!(o instanceof Watermark)) {
                StreamRecord<Map<String, List<Event>>> el = (StreamRecord<Map<String, List<Event>>>) o;
                List<Event> res = new ArrayList<>();
                for (List<Event> le : el.getValue().values()) {
                    res.addAll(le);
                }
                resultingPatterns.add(res);
            }
        }
        compareMaps(
            resultingPatterns,
            Lists.<List<Event>>newArrayList(
                Lists.newArrayList(startEvent1, endEvent, middleEvent1, middleEvent2, middleEvent4),
                Lists.newArrayList(startEvent1, endEvent, middleEvent2, middleEvent1),
                Lists.newArrayList(startEvent1, endEvent, middleEvent3, middleEvent1),
                Lists.newArrayList(startEvent2, endEvent, middleEvent3, middleEvent4),
                Lists.newArrayList(startEvent1, endEvent, middleEvent4, middleEvent1),
                Lists.newArrayList(startEvent1, endEvent, middleEvent1),
                Lists.newArrayList(startEvent2, endEvent, middleEvent3)));
    } finally {
        harness.close();
    }
}
Also used: RocksDBStateBackend(org.apache.flink.contrib.streaming.state.RocksDBStateBackend) SimpleCondition(org.apache.flink.cep.pattern.conditions.SimpleCondition) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) NFA(org.apache.flink.cep.nfa.NFA) CepOperatorBuilder.createOperatorForNFA(org.apache.flink.cep.utils.CepOperatorBuilder.createOperatorForNFA) ArrayList(java.util.ArrayList) NFACompiler(org.apache.flink.cep.nfa.compiler.NFACompiler) List(java.util.List) SubEvent(org.apache.flink.cep.SubEvent) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) Event(org.apache.flink.cep.Event) Map(java.util.Map) HashMap(java.util.HashMap) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
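In a real job, a pattern like the one above would be applied with the CEP API instead of a test harness. A minimal sketch, assuming an input DataStream<Event> named input keyed by Event::getId (the stream name and key selector are placeholders):

// Apply the pattern to a keyed stream and flatten every match into a single list of events (sketch).
PatternStream<Event> patternStream = CEP.pattern(input.keyBy(Event::getId), pattern);
DataStream<List<Event>> matches = patternStream.select(
    new PatternSelectFunction<Event, List<Event>>() {
        @Override
        public List<Event> select(Map<String, List<Event>> match) {
            List<Event> flat = new ArrayList<>();
            for (List<Event> events : match.values()) {
                flat.addAll(events);
            }
            return flat;
        }
    });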

Aggregations

Event (org.apache.flink.cep.Event) 27
SubEvent (org.apache.flink.cep.SubEvent) 27
Test (org.junit.Test) 27
Map (java.util.Map) 21
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState) 15
Watermark (org.apache.flink.streaming.api.watermark.Watermark) 14
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord) 13
HashMap (java.util.HashMap) 11
List (java.util.List) 10
KeySelector (org.apache.flink.api.java.functions.KeySelector) 9
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) 8
ArrayList (java.util.ArrayList) 7
SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition) 7
RocksDBStateBackend (org.apache.flink.contrib.streaming.state.RocksDBStateBackend) 4
MemoryStateBackend (org.apache.flink.runtime.state.memory.MemoryStateBackend) 4
Ignore (org.junit.Ignore) 3
ValueState (org.apache.flink.api.common.state.ValueState) 2
NullByteKeySelector (org.apache.flink.api.java.functions.NullByteKeySelector) 2
IterativeCondition (org.apache.flink.cep.pattern.conditions.IterativeCondition) 2
NFA (org.apache.flink.cep.nfa.NFA) 1