Example 11 with Event

Use of org.apache.flink.cep.Event in project flink by apache.

In class CEPRescalingTest, method testCEPFunctionScalingDown:

@Test
public void testCEPFunctionScalingDown() throws Exception {
    int maxParallelism = 10;
    KeySelector<Event, Integer> keySelector = new KeySelector<Event, Integer>() {

        private static final long serialVersionUID = -4873366487571254798L;

        @Override
        public Integer getKey(Event value) throws Exception {
            return value.getId();
        }
    };
    // create some valid pattern events on predetermined key groups and task indices
    // this will go to task index 0
    Event startEvent1 = new Event(7, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(7, "foo", 1.0, 10.0);
    Event endEvent1 = new Event(7, "end", 1.0);
    // verification of the key choice
    int keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent1), maxParallelism);
    assertEquals(1, keygroup);
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // this will go to task index 1
    Event startEvent2 = new Event(45, "start", 1.0);
    SubEvent middleEvent2 = new SubEvent(45, "foo", 1.0, 10.0);
    Event endEvent2 = new Event(45, "end", 1.0);
    keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent2), maxParallelism);
    assertEquals(6, keygroup);
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // this will go to task index 0
    Event startEvent3 = new Event(90, "start", 1.0);
    SubEvent middleEvent3 = new SubEvent(90, "foo", 1.0, 10.0);
    Event endEvent3 = new Event(90, "end", 1.0);
    keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent3), maxParallelism);
    assertEquals(2, keygroup);
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
    assertEquals(0, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // this will go to task index 2
    Event startEvent4 = new Event(10, "start", 1.0);
    SubEvent middleEvent4 = new SubEvent(10, "foo", 1.0, 10.0);
    Event endEvent4 = new Event(10, "end", 1.0);
    keygroup = KeyGroupRangeAssignment.assignToKeyGroup(keySelector.getKey(startEvent4), maxParallelism);
    assertEquals(9, keygroup);
    assertEquals(2, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keygroup));
    assertEquals(1, KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keygroup));
    // starting the test, we will go from parallelism of 3 to parallelism of 2
    OneInputStreamOperatorTestHarness<Event, Map<String, Event>> harness1 = getTestHarness(maxParallelism, 3, 0);
    harness1.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, Event>> harness2 = getTestHarness(maxParallelism, 3, 1);
    harness2.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, Event>> harness3 = getTestHarness(maxParallelism, 3, 2);
    harness3.open();
    harness1.processWatermark(Long.MIN_VALUE);
    harness2.processWatermark(Long.MIN_VALUE);
    harness3.processWatermark(Long.MIN_VALUE);
    // valid element
    harness1.processElement(new StreamRecord<>(startEvent1, 1));
    harness1.processElement(new StreamRecord<>(new Event(7, "foobar", 1.0), 2));
    // valid element
    harness1.processElement(new StreamRecord<Event>(middleEvent1, 3));
    // valid element
    harness1.processElement(new StreamRecord<>(endEvent1, 5));
    // up to here we have a valid sequence, so after creating the
    // new instance and sending it a watermark, we expect it to fire,
    // even with no new elements.
    harness1.processElement(new StreamRecord<>(startEvent3, 10));
    harness1.processElement(new StreamRecord<>(startEvent1, 10));
    harness2.processElement(new StreamRecord<>(startEvent2, 7));
    harness2.processElement(new StreamRecord<Event>(middleEvent2, 8));
    harness3.processElement(new StreamRecord<>(startEvent4, 15));
    harness3.processElement(new StreamRecord<Event>(middleEvent4, 16));
    harness3.processElement(new StreamRecord<>(endEvent4, 17));
    // so far we only have the initial watermark
    assertEquals(1, harness1.getOutput().size());
    verifyWatermark(harness1.getOutput().poll(), Long.MIN_VALUE);
    assertEquals(1, harness2.getOutput().size());
    verifyWatermark(harness2.getOutput().poll(), Long.MIN_VALUE);
    assertEquals(1, harness3.getOutput().size());
    verifyWatermark(harness3.getOutput().poll(), Long.MIN_VALUE);
    // we take a snapshot and make it look like the state of a single operator;
    // this will be the initial state of all downstream tasks.
    OperatorStateHandles snapshot = AbstractStreamOperatorTestHarness.repackageState(
            harness2.snapshot(0, 0), harness1.snapshot(0, 0), harness3.snapshot(0, 0));
    OneInputStreamOperatorTestHarness<Event, Map<String, Event>> harness4 = getTestHarness(maxParallelism, 2, 0);
    harness4.setup();
    harness4.initializeState(snapshot);
    harness4.open();
    OneInputStreamOperatorTestHarness<Event, Map<String, Event>> harness5 = getTestHarness(maxParallelism, 2, 1);
    harness5.setup();
    harness5.initializeState(snapshot);
    harness5.open();
    harness5.processElement(new StreamRecord<>(endEvent2, 11));
    harness5.processWatermark(new Watermark(12));
    verifyPattern(harness5.getOutput().poll(), startEvent2, middleEvent2, endEvent2);
    verifyWatermark(harness5.getOutput().poll(), 12);
    // if element timestamps are not correctly checkpointed/restored, this will lead to
    // a pruning-time underflow exception in the NFA
    harness4.processWatermark(new Watermark(12));
    assertEquals(2, harness4.getOutput().size());
    verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
    verifyWatermark(harness4.getOutput().poll(), 12);
    // valid element
    harness4.processElement(new StreamRecord<Event>(middleEvent3, 15));
    // valid element
    harness4.processElement(new StreamRecord<>(endEvent3, 16));
    // valid element
    harness4.processElement(new StreamRecord<Event>(middleEvent1, 15));
    // valid element
    harness4.processElement(new StreamRecord<>(endEvent1, 16));
    harness4.processWatermark(new Watermark(Long.MAX_VALUE));
    harness5.processWatermark(new Watermark(Long.MAX_VALUE));
    // verify result
    assertEquals(3, harness4.getOutput().size());
    // check the order of the events in the output
    Queue<Object> output = harness4.getOutput();
    StreamRecord<?> resultRecord = (StreamRecord<?>) output.peek();
    assertTrue(resultRecord.getValue() instanceof Map);
    @SuppressWarnings("unchecked")
    Map<String, Event> patternMap = (Map<String, Event>) resultRecord.getValue();
    if (patternMap.get("start").getId() == 7) {
        verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
        verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
    } else {
        verifyPattern(harness4.getOutput().poll(), startEvent3, middleEvent3, endEvent3);
        verifyPattern(harness4.getOutput().poll(), startEvent1, middleEvent1, endEvent1);
    }
    // after scaling down, the match for key 10 (key group 9) should end up here
    assertEquals(2, harness5.getOutput().size());
    verifyPattern(harness5.getOutput().poll(), startEvent4, middleEvent4, endEvent4);
    harness1.close();
    harness2.close();
    harness3.close();
    harness4.close();
    harness5.close();
}
Also used: SubEvent (org.apache.flink.cep.SubEvent), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), KeySelector (org.apache.flink.api.java.functions.KeySelector), OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles), Event (org.apache.flink.cep.Event), Map (java.util.Map), Watermark (org.apache.flink.streaming.api.watermark.Watermark), Test (org.junit.Test)
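
The key-to-task routing asserted above follows from two formulas in Flink's KeyGroupRangeAssignment: a key is hashed into one of maxParallelism key groups, and a key group is assigned to the operator instance keyGroupId * parallelism / maxParallelism (integer division). A minimal sketch reproducing the expectations for the four keys in this test, assuming only those published helpers; the class name and printout are illustrative, not part of the test:

import org.apache.flink.runtime.state.KeyGroupRangeAssignment;

public class KeyGroupRoutingSketch {

    public static void main(String[] args) {
        int maxParallelism = 10;
        for (int key : new int[] { 7, 45, 90, 10 }) {
            // same call the test uses for its "verification of the key choice"
            int keyGroup = KeyGroupRangeAssignment.assignToKeyGroup(key, maxParallelism);
            // operator index = keyGroupId * parallelism / maxParallelism
            int taskAtParallelism3 = KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 3, keyGroup);
            int taskAtParallelism2 = KeyGroupRangeAssignment.computeOperatorIndexForKeyGroup(maxParallelism, 2, keyGroup);
            System.out.printf("key %d -> key group %d -> task %d (p=3) / task %d (p=2)%n",
                    key, keyGroup, taskAtParallelism3, taskAtParallelism2);
        }
    }
}

This prints key groups 1, 6, 2, and 9 for keys 7, 45, 90, and 10, matching the assertEquals checks in the test.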

Example 12 with Event

Use of org.apache.flink.cep.Event in project flink by apache.

In class NFAITCase, method testSimplePatternWithTimeoutHandling:

/**
 * Tests that the NFA successfully returns partially matched event sequences when they've timed
 * out.
 */
@Test
public void testSimplePatternWithTimeoutHandling() {
    List<StreamRecord<Event>> events = new ArrayList<>();
    List<Map<String, Event>> resultingPatterns = new ArrayList<>();
    Set<Tuple2<Map<String, Event>, Long>> resultingTimeoutPatterns = new HashSet<>();
    Set<Tuple2<Map<String, Event>, Long>> expectedTimeoutPatterns = new HashSet<>();
    events.add(new StreamRecord<Event>(new Event(1, "start", 1.0), 1));
    events.add(new StreamRecord<Event>(new Event(2, "start", 1.0), 2));
    events.add(new StreamRecord<Event>(new Event(3, "middle", 1.0), 3));
    events.add(new StreamRecord<Event>(new Event(4, "foobar", 1.0), 4));
    events.add(new StreamRecord<Event>(new Event(5, "end", 1.0), 11));
    events.add(new StreamRecord<Event>(new Event(6, "end", 1.0), 13));
    Map<String, Event> timeoutPattern1 = new HashMap<>();
    timeoutPattern1.put("start", new Event(1, "start", 1.0));
    timeoutPattern1.put("middle", new Event(3, "middle", 1.0));
    Map<String, Event> timeoutPattern2 = new HashMap<>();
    timeoutPattern2.put("start", new Event(2, "start", 1.0));
    timeoutPattern2.put("middle", new Event(3, "middle", 1.0));
    Map<String, Event> timeoutPattern3 = new HashMap<>();
    timeoutPattern3.put("start", new Event(1, "start", 1.0));
    Map<String, Event> timeoutPattern4 = new HashMap<>();
    timeoutPattern4.put("start", new Event(2, "start", 1.0));
    expectedTimeoutPatterns.add(Tuple2.of(timeoutPattern1, 11L));
    expectedTimeoutPatterns.add(Tuple2.of(timeoutPattern2, 13L));
    expectedTimeoutPatterns.add(Tuple2.of(timeoutPattern3, 11L));
    expectedTimeoutPatterns.add(Tuple2.of(timeoutPattern4, 13L));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = 7907391379273505897L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = -3268741540234334074L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = -8995174172182138608L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(10));
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), true);
    for (StreamRecord<Event> event : events) {
        Tuple2<Collection<Map<String, Event>>, Collection<Tuple2<Map<String, Event>, Long>>> patterns = nfa.process(event.getValue(), event.getTimestamp());
        Collection<Map<String, Event>> matchedPatterns = patterns.f0;
        Collection<Tuple2<Map<String, Event>, Long>> timeoutPatterns = patterns.f1;
        resultingPatterns.addAll(matchedPatterns);
        resultingTimeoutPatterns.addAll(timeoutPatterns);
    }
    assertEquals(1, resultingPatterns.size());
    assertEquals(expectedTimeoutPatterns.size(), resultingTimeoutPatterns.size());
    assertEquals(expectedTimeoutPatterns, resultingTimeoutPatterns);
}
Also used: FilterFunction (org.apache.flink.api.common.functions.FilterFunction), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), Event (org.apache.flink.cep.Event), SubEvent (org.apache.flink.cep.SubEvent), Collection (java.util.Collection), Map (java.util.Map), HashSet (java.util.HashSet), Test (org.junit.Test)
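
This test drives the NFA directly; at the DataStream level, the same timed-out partial matches are surfaced through PatternStream.select with a PatternTimeoutFunction. A hedged sketch, assuming the Flink 1.2-era two-argument select overload that wraps the two result types in an Either; input is a hypothetical DataStream<Event>:

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.types.Either;

PatternStream<Event> patternStream = CEP.pattern(input, pattern);

DataStream<Either<String, String>> result = patternStream.select(
    new PatternTimeoutFunction<Event, String>() {
        @Override
        public String timeout(Map<String, Event> pattern, long timeoutTimestamp) {
            // corresponds to the Tuple2<Map<String, Event>, Long> entries collected above
            return "timed out at " + timeoutTimestamp + ": " + pattern.keySet();
        }
    },
    new PatternSelectFunction<Event, String>() {
        @Override
        public String select(Map<String, Event> pattern) {
            // corresponds to the single fully matched sequence the test expects
            return "matched: " + pattern.keySet();
        }
    });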

Example 13 with Event

Use of org.apache.flink.cep.Event in project flink by apache.

In class NFAITCase, method testBranchingPattern:

@Test
public void testBranchingPattern() {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(40, "start", 1.0);
    SubEvent middleEvent1 = new SubEvent(41, "foo1", 1.0, 10.0);
    SubEvent middleEvent2 = new SubEvent(42, "foo2", 1.0, 10.0);
    SubEvent middleEvent3 = new SubEvent(43, "foo3", 1.0, 10.0);
    SubEvent nextOne1 = new SubEvent(44, "next-one", 1.0, 2.0);
    SubEvent nextOne2 = new SubEvent(45, "next-one", 1.0, 2.0);
    Event endEvent = new Event(46, "end", 1.0);
    inputEvents.add(new StreamRecord<Event>(startEvent, 1));
    inputEvents.add(new StreamRecord<Event>(middleEvent1, 3));
    inputEvents.add(new StreamRecord<Event>(middleEvent2, 4));
    inputEvents.add(new StreamRecord<Event>(middleEvent3, 5));
    inputEvents.add(new StreamRecord<Event>(nextOne1, 6));
    inputEvents.add(new StreamRecord<Event>(nextOne2, 7));
    inputEvents.add(new StreamRecord<Event>(endEvent, 8));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle-first").subtype(SubEvent.class).where(new FilterFunction<SubEvent>() {

        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("middle-second").subtype(SubEvent.class).where(new FilterFunction<SubEvent>() {

        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getName().equals("next-one");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), false);
    List<Map<String, Event>> resultingPatterns = new ArrayList<>();
    for (StreamRecord<Event> inputEvent : inputEvents) {
        Collection<Map<String, Event>> patterns = nfa.process(inputEvent.getValue(), inputEvent.getTimestamp()).f0;
        resultingPatterns.addAll(patterns);
    }
    assertEquals(6, resultingPatterns.size());
    final Set<Set<Event>> patterns = new HashSet<>();
    for (Map<String, Event> resultingPattern : resultingPatterns) {
        patterns.add(new HashSet<>(resultingPattern.values()));
    }
    assertEquals(Sets.newHashSet(
            Sets.newHashSet(startEvent, middleEvent1, nextOne1, endEvent),
            Sets.newHashSet(startEvent, middleEvent2, nextOne1, endEvent),
            Sets.newHashSet(startEvent, middleEvent3, nextOne1, endEvent),
            Sets.newHashSet(startEvent, middleEvent1, nextOne2, endEvent),
            Sets.newHashSet(startEvent, middleEvent2, nextOne2, endEvent),
            Sets.newHashSet(startEvent, middleEvent3, nextOne2, endEvent)),
        patterns);
}
Also used: FilterFunction (org.apache.flink.api.common.functions.FilterFunction), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), SubEvent (org.apache.flink.cep.SubEvent), Set (java.util.Set), HashSet (java.util.HashSet), ArrayList (java.util.ArrayList), Event (org.apache.flink.cep.Event), HashMap (java.util.HashMap), Map (java.util.Map), Test (org.junit.Test)
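
Why six matches: followedBy() allows non-matching events in between, so each of the three qualifying "middle-first" events can pair with either of the two later "next-one" events, all sharing the single start and end event (3 x 2 = 6). A sketch building the same expected set with nested loops, equivalent to the literal Sets.newHashSet(...) enumeration in the assertion:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

Set<Set<Event>> expected = new HashSet<>();
for (SubEvent middle : Arrays.asList(middleEvent1, middleEvent2, middleEvent3)) {
    for (SubEvent next : Arrays.asList(nextOne1, nextOne2)) {
        // 3 middle candidates x 2 next-one candidates = 6 combinations
        expected.add(new HashSet<>(Arrays.asList(startEvent, middle, next, endEvent)));
    }
}
assertEquals(expected, patterns);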

Example 14 with Event

Use of org.apache.flink.cep.Event in project flink by apache.

In class NFAITCase, method testSimplePatternWithTimeWindowNFA:

/**
 * Tests that the NFA successfully filters out expired elements with respect to the window
 * length.
 */
@Test
public void testSimplePatternWithTimeWindowNFA() {
    List<StreamRecord<Event>> events = new ArrayList<>();
    List<Map<String, Event>> resultingPatterns = new ArrayList<>();
    final Event startEvent;
    final Event middleEvent;
    final Event endEvent;
    events.add(new StreamRecord<Event>(new Event(1, "start", 1.0), 1));
    events.add(new StreamRecord<Event>(startEvent = new Event(2, "start", 1.0), 2));
    events.add(new StreamRecord<Event>(middleEvent = new Event(3, "middle", 1.0), 3));
    events.add(new StreamRecord<Event>(new Event(4, "foobar", 1.0), 4));
    events.add(new StreamRecord<Event>(endEvent = new Event(5, "end", 1.0), 11));
    events.add(new StreamRecord<Event>(new Event(6, "end", 1.0), 13));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = 7907391379273505897L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = -3268741540234334074L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("middle");
        }
    }).followedBy("end").where(new FilterFunction<Event>() {

        private static final long serialVersionUID = -8995174172182138608L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    }).within(Time.milliseconds(10));
    NFA<Event> nfa = NFACompiler.compile(pattern, Event.createTypeSerializer(), false);
    for (StreamRecord<Event> event : events) {
        Collection<Map<String, Event>> patterns = nfa.process(event.getValue(), event.getTimestamp()).f0;
        resultingPatterns.addAll(patterns);
    }
    assertEquals(1, resultingPatterns.size());
    Map<String, Event> patternMap = resultingPatterns.get(0);
    assertEquals(startEvent, patternMap.get("start"));
    assertEquals(middleEvent, patternMap.get("middle"));
    assertEquals(endEvent, patternMap.get("end"));
}
Also used: FilterFunction (org.apache.flink.api.common.functions.FilterFunction), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), ArrayList (java.util.ArrayList), Event (org.apache.flink.cep.Event), SubEvent (org.apache.flink.cep.SubEvent), HashMap (java.util.HashMap), Map (java.util.Map), Test (org.junit.Test)
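
Only one of the two start events survives the 10 ms window: the matching end event arrives at timestamp 11, by which time the partial match begun at timestamp 1 has aged out, while the one begun at timestamp 2 is still live. A minimal sketch of that boundary arithmetic, assuming the NFA prunes a partial match once currentTimestamp - startTimestamp >= windowTime:

// within(Time.milliseconds(10))
long windowTime = 10L;
long firstStart = 1L, secondStart = 2L, endArrival = 11L;

// start@1 is already expired when end@11 arrives: 11 - 1 >= 10
assertTrue(endArrival - firstStart >= windowTime);
// start@2 is still inside the window: 11 - 2 < 10
assertTrue(endArrival - secondStart < windowTime);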

Example 15 with Event

Use of org.apache.flink.cep.Event in project flink by apache.

In class NFATest, method testNFASerialization:

@Test
public void testNFASerialization() throws IOException, ClassNotFoundException {
    NFA<Event> nfa = new NFA<>(Event.createTypeSerializer(), 0, false);
    State<Event> startingState = new State<>("", State.StateType.Start);
    State<Event> startState = new State<>("start", State.StateType.Normal);
    State<Event> endState = new State<>("end", State.StateType.Final);
    StateTransition<Event> starting2Start = new StateTransition<>(StateTransitionAction.TAKE, startState, new NameFilter("start"));
    StateTransition<Event> start2End = new StateTransition<>(StateTransitionAction.TAKE, endState, new NameFilter("end"));
    StateTransition<Event> start2Start = new StateTransition<>(StateTransitionAction.IGNORE, startState, null);
    startingState.addStateTransition(starting2Start);
    startState.addStateTransition(start2End);
    startState.addStateTransition(start2Start);
    nfa.addState(startingState);
    nfa.addState(startState);
    nfa.addState(endState);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(nfa);
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    ObjectInputStream ois = new ObjectInputStream(bais);
    @SuppressWarnings("unchecked")
    NFA<Event> copy = (NFA<Event>) ois.readObject();
    assertEquals(nfa, copy);
}
Also used: ByteArrayOutputStream (org.apache.commons.io.output.ByteArrayOutputStream), ObjectOutputStream (java.io.ObjectOutputStream), ByteArrayInputStream (java.io.ByteArrayInputStream), Event (org.apache.flink.cep.Event), ObjectInputStream (java.io.ObjectInputStream), Test (org.junit.Test)
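
The serialize-then-deserialize round trip above is plain Java serialization; factored into a reusable helper it reads as follows. This is a hypothetical utility, not part of Flink, but it exercises exactly what the final assertEquals relies on: NFA is Serializable and compares by value.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

@SuppressWarnings("unchecked")
static <T extends Serializable> T roundTrip(T original) throws IOException, ClassNotFoundException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(original);
    }
    try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
        return (T) ois.readObject();
    }
}

// usage in the test body:
// NFA<Event> copy = roundTrip(nfa);
// assertEquals(nfa, copy);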

Aggregations

Event (org.apache.flink.cep.Event): 20
Test (org.junit.Test): 20
Map (java.util.Map): 17
SubEvent (org.apache.flink.cep.SubEvent): 14
HashMap (java.util.HashMap): 13
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 11
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 9
ArrayList (java.util.ArrayList): 7
HashSet (java.util.HashSet): 5
FilterFunction (org.apache.flink.api.common.functions.FilterFunction): 4
OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles): 4
KeySelector (org.apache.flink.api.java.functions.KeySelector): 3
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 3
ByteArrayInputStream (java.io.ByteArrayInputStream): 2
ObjectInputStream (java.io.ObjectInputStream): 2
ObjectOutputStream (java.io.ObjectOutputStream): 2
NullByteKeySelector (org.apache.flink.api.java.functions.NullByteKeySelector): 2
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 2
LinkedHashMultimap (com.google.common.collect.LinkedHashMultimap): 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1