
Example 81 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in the Apache Flink project.

From the class NFAITCase, method testSimplePatternNFA.

@Test
public void testSimplePatternNFA() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(41, "start", 1.0);
    SubEvent middleEvent = new SubEvent(42, "foo", 1.0, 10.0);
    Event endEvent = new Event(43, "end", 1.0);
    inputEvents.add(new StreamRecord<>(startEvent, 1));
    inputEvents.add(new StreamRecord<>(new Event(43, "foobar", 1.0), 2));
    inputEvents.add(new StreamRecord<Event>(new SubEvent(41, "barfoo", 1.0, 5.0), 3));
    inputEvents.add(new StreamRecord<Event>(middleEvent, 3));
    inputEvents.add(new StreamRecord<>(new Event(43, "start", 1.0), 4));
    inputEvents.add(new StreamRecord<>(endEvent, 5));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("start");
        }
    }).followedBy("middle").subtype(SubEvent.class).where(new SimpleCondition<SubEvent>() {

        private static final long serialVersionUID = 6215754202506583964L;

        @Override
        public boolean filter(SubEvent value) throws Exception {
            return value.getVolume() > 5.0;
        }
    }).followedBy("end").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 7056763917392056548L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("end");
        }
    });
    NFA<Event> nfa = compile(pattern, false);
    List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);
    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(Lists.newArrayList(startEvent, middleEvent, endEvent)));
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), SubEvent (org.apache.flink.cep.SubEvent), SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition), ArrayList (java.util.ArrayList), Event (org.apache.flink.cep.Event), List (java.util.List), Test (org.junit.Test)
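
In practice this three-stage pattern would be applied to a live stream through the CEP DataStream API rather than fed to an NFA by hand. The following is a minimal sketch of that wiring, not taken from the example above: it reuses the Event test type and the pattern built in the test, assumes a DataStream<Event> is available, and assumes a Flink version in which PatternSelectFunction receives a Map<String, List<Event>>. The class and method names are illustrative.

import java.util.List;
import java.util.Map;

import org.apache.flink.cep.CEP;
import org.apache.flink.cep.Event;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.streaming.api.datastream.DataStream;

public class PatternOnStreamSketch {

    // Applies the start -> middle -> end pattern from the test to a live DataStream<Event>
    // instead of feeding StreamRecords into an NFA directly.
    static DataStream<String> detect(DataStream<Event> input, Pattern<Event, ?> pattern) {
        PatternStream<Event> patternStream = CEP.pattern(input, pattern);
        return patternStream.select(new PatternSelectFunction<Event, String>() {
            @Override
            public String select(Map<String, List<Event>> match) {
                // "start", "middle" and "end" are the stage names declared when building the pattern.
                return match.get("start").get(0).getName() + " -> "
                        + match.get("middle").get(0).getName() + " -> "
                        + match.get("end").get(0).getName();
            }
        });
    }
}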

Example 82 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in the Apache Flink project.

From the class NFAITCase, method testZeroOrMore.

@Test
public void testZeroOrMore() throws Exception {
    List<StreamRecord<Event>> inputEvents = new ArrayList<>();
    Event startEvent = new Event(40, "c", 1.0);
    Event middleEvent1 = new Event(41, "a", 2.0);
    Event middleEvent2 = new Event(42, "a", 3.0);
    Event end1 = new Event(44, "b", 5.0);
    inputEvents.add(new StreamRecord<>(startEvent, 1));
    inputEvents.add(new StreamRecord<>(middleEvent1, 3));
    inputEvents.add(new StreamRecord<>(middleEvent2, 4));
    inputEvents.add(new StreamRecord<>(end1, 6));
    Pattern<Event, ?> pattern = Pattern.<Event>begin("start").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("c");
        }
    }).followedByAny("middle").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("a");
        }
    }).oneOrMore().allowCombinations().optional().followedBy("end1").where(new SimpleCondition<Event>() {

        private static final long serialVersionUID = 5726188262756267490L;

        @Override
        public boolean filter(Event value) throws Exception {
            return value.getName().equals("b");
        }
    });
    NFA<Event> nfa = compile(pattern, false);
    final List<List<Event>> resultingPatterns = feedNFA(inputEvents, nfa);
    comparePatterns(resultingPatterns, Lists.<List<Event>>newArrayList(Lists.newArrayList(startEvent, middleEvent1, middleEvent2, end1), Lists.newArrayList(startEvent, middleEvent1, end1), Lists.newArrayList(startEvent, middleEvent2, end1), Lists.newArrayList(startEvent, end1)));
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition), ArrayList (java.util.ArrayList), Event (org.apache.flink.cep.Event), SubEvent (org.apache.flink.cep.SubEvent), List (java.util.List), Test (org.junit.Test)
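
In the test above, oneOrMore() combined with optional() expresses "zero or more" occurrences of the middle stage, followedByAny() lets the loop start at any matching event, and allowCombinations() switches the loop to non-deterministic relaxed contiguity, so combinations that skip intermediate matching events are emitted as well. The sketch below contrasts the three contiguity choices for the same looping stage; it reuses the Event test type, and the named() helper is illustrative.

import org.apache.flink.cep.Event;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;

public class LoopContiguitySketch {

    // Hypothetical helper: a condition that accepts events with the given name.
    private static SimpleCondition<Event> named(final String name) {
        return new SimpleCondition<Event>() {
            @Override
            public boolean filter(Event value) {
                return value.getName().equals(name);
            }
        };
    }

    // Relaxed contiguity (the default for a looping stage): non-matching events between
    // accepted ones are ignored, but the loop cannot skip a matching event and resume later.
    static Pattern<Event, ?> relaxedLoop() {
        return Pattern.<Event>begin("start").where(named("c"))
                .followedByAny("middle").where(named("a")).oneOrMore().optional()
                .followedBy("end1").where(named("b"));
    }

    // Non-deterministic relaxed contiguity: combinations that skip intermediate matching
    // events are emitted as well. This is what the test above uses.
    static Pattern<Event, ?> combinationsLoop() {
        return Pattern.<Event>begin("start").where(named("c"))
                .followedByAny("middle").where(named("a")).oneOrMore().allowCombinations().optional()
                .followedBy("end1").where(named("b"));
    }

    // Strict contiguity inside the loop: accepted "a" events must directly follow each other.
    static Pattern<Event, ?> strictLoop() {
        return Pattern.<Event>begin("start").where(named("c"))
                .followedByAny("middle").where(named("a")).oneOrMore().consecutive().optional()
                .followedBy("end1").where(named("b"));
    }
}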

Example 83 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in the Apache Flink project.

From the class EvictingWindowOperator, method onProcessingTime.

@Override
public void onProcessingTime(InternalTimer<K, W> timer) throws Exception {
    triggerContext.key = timer.getKey();
    triggerContext.window = timer.getNamespace();
    evictorContext.key = timer.getKey();
    evictorContext.window = timer.getNamespace();
    MergingWindowSet<W> mergingWindows = null;
    if (windowAssigner instanceof MergingWindowAssigner) {
        mergingWindows = getMergingWindowSet();
        W stateWindow = mergingWindows.getStateWindow(triggerContext.window);
        if (stateWindow == null) {
            // The timer fired for a window that no longer exists in the merging window set;
            // the window and therefore the Trigger state have already been cleared, so there is nothing to do.
            return;
        } else {
            evictingWindowState.setCurrentNamespace(stateWindow);
        }
    } else {
        evictingWindowState.setCurrentNamespace(triggerContext.window);
    }
    TriggerResult triggerResult = triggerContext.onProcessingTime(timer.getTimestamp());
    if (triggerResult.isFire()) {
        Iterable<StreamRecord<IN>> contents = evictingWindowState.get();
        if (contents != null) {
            emitWindowContents(triggerContext.window, contents, evictingWindowState);
        }
    }
    if (triggerResult.isPurge()) {
        evictingWindowState.clear();
    }
    if (!windowAssigner.isEventTime() && isCleanupTime(triggerContext.window, timer.getTimestamp())) {
        clearAllState(triggerContext.window, evictingWindowState, mergingWindows);
    }
    if (mergingWindows != null) {
        // need to make sure to update the merging state in state
        mergingWindows.persist();
    }
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), TriggerResult (org.apache.flink.streaming.api.windowing.triggers.TriggerResult), MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner)
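
EvictingWindowOperator is not constructed directly in user code; it is what the DataStream API translation produces once an evictor is set on a WindowedStream, and the Iterable<StreamRecord<IN>> seen above is the raw per-window buffer it keeps. Below is a minimal sketch of the wiring that leads to this operator, assuming a Flink version where the Time-based window assigners are available; the class and method names are illustrative.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.evictors.CountEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class EvictingWindowSketch {

    // Calling evictor(...) on the WindowedStream is what makes the translation use
    // EvictingWindowOperator, which buffers the raw StreamRecords per window.
    static SingleOutputStreamOperator<Integer> countPerWindow(DataStream<String> input) {
        return input
                .keyBy(new KeySelector<String, String>() {
                    @Override
                    public String getKey(String value) {
                        return value;
                    }
                })
                .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
                .evictor(CountEvictor.of(10)) // keep at most 10 elements per window
                .process(new ProcessWindowFunction<String, Integer, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<String> elements, Collector<Integer> out) {
                        int count = 0;
                        for (String ignored : elements) {
                            count++;
                        }
                        out.collect(count);
                    }
                });
    }
}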

Example 84 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in the Apache Flink project.

From the class EvictingWindowOperator, method processElement.

@Override
public void processElement(StreamRecord<IN> element) throws Exception {
    final Collection<W> elementWindows = windowAssigner.assignWindows(element.getValue(), element.getTimestamp(), windowAssignerContext);
    // true when the element is not handled by any of its assigned windows
    boolean isSkippedElement = true;
    final K key = this.<K>getKeyedStateBackend().getCurrentKey();
    if (windowAssigner instanceof MergingWindowAssigner) {
        MergingWindowSet<W> mergingWindows = getMergingWindowSet();
        for (W window : elementWindows) {
            // adding the new window might result in a merge, in that case the actualWindow
            // is the merged window and we work with that. If we don't merge then
            // actualWindow == window
            W actualWindow = mergingWindows.addWindow(window, new MergingWindowSet.MergeFunction<W>() {

                @Override
                public void merge(W mergeResult, Collection<W> mergedWindows, W stateWindowResult, Collection<W> mergedStateWindows) throws Exception {
                    if ((windowAssigner.isEventTime() && mergeResult.maxTimestamp() + allowedLateness <= internalTimerService.currentWatermark())) {
                        throw new UnsupportedOperationException("The end timestamp of an " + "event-time window cannot become earlier than the current watermark " + "by merging. Current watermark: " + internalTimerService.currentWatermark() + " window: " + mergeResult);
                    } else if (!windowAssigner.isEventTime() && mergeResult.maxTimestamp() <= internalTimerService.currentProcessingTime()) {
                        throw new UnsupportedOperationException("The end timestamp of a " + "processing-time window cannot become earlier than the current processing time " + "by merging. Current processing time: " + internalTimerService.currentProcessingTime() + " window: " + mergeResult);
                    }
                    triggerContext.key = key;
                    triggerContext.window = mergeResult;
                    triggerContext.onMerge(mergedWindows);
                    for (W m : mergedWindows) {
                        triggerContext.window = m;
                        triggerContext.clear();
                        deleteCleanupTimer(m);
                    }
                    // merge the merged state windows into the newly resulting
                    // state window
                    evictingWindowState.mergeNamespaces(stateWindowResult, mergedStateWindows);
                }
            });
            // drop if the window is already late
            if (isWindowLate(actualWindow)) {
                mergingWindows.retireWindow(actualWindow);
                continue;
            }
            isSkippedElement = false;
            W stateWindow = mergingWindows.getStateWindow(actualWindow);
            if (stateWindow == null) {
                throw new IllegalStateException("Window " + window + " is not in in-flight window set.");
            }
            evictingWindowState.setCurrentNamespace(stateWindow);
            evictingWindowState.add(element);
            triggerContext.key = key;
            triggerContext.window = actualWindow;
            evictorContext.key = key;
            evictorContext.window = actualWindow;
            TriggerResult triggerResult = triggerContext.onElement(element);
            if (triggerResult.isFire()) {
                Iterable<StreamRecord<IN>> contents = evictingWindowState.get();
                if (contents == null) {
                    // if we have no state, there is nothing to do
                    continue;
                }
                emitWindowContents(actualWindow, contents, evictingWindowState);
            }
            if (triggerResult.isPurge()) {
                evictingWindowState.clear();
            }
            registerCleanupTimer(actualWindow);
        }
        // need to make sure to update the merging state in state
        mergingWindows.persist();
    } else {
        for (W window : elementWindows) {
            // check if the window is already inactive
            if (isWindowLate(window)) {
                continue;
            }
            isSkippedElement = false;
            evictingWindowState.setCurrentNamespace(window);
            evictingWindowState.add(element);
            triggerContext.key = key;
            triggerContext.window = window;
            evictorContext.key = key;
            evictorContext.window = window;
            TriggerResult triggerResult = triggerContext.onElement(element);
            if (triggerResult.isFire()) {
                Iterable<StreamRecord<IN>> contents = evictingWindowState.get();
                if (contents == null) {
                    // if we have no state, there is nothing to do
                    continue;
                }
                emitWindowContents(window, contents, evictingWindowState);
            }
            if (triggerResult.isPurge()) {
                evictingWindowState.clear();
            }
            registerCleanupTimer(window);
        }
    }
    // the element was not handled by any window and is considered late: send it to the
    // late-data side output if one is configured, otherwise count it as dropped
    if (isSkippedElement && isElementLate(element)) {
        if (lateDataOutputTag != null) {
            sideOutput(element);
        } else {
            this.numLateRecordsDropped.inc();
        }
    }
}
Also used: StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner), TriggerResult (org.apache.flink.streaming.api.windowing.triggers.TriggerResult)
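
The merging branch of processElement is exercised by session windows, and the final isSkippedElement/isElementLate check feeds the late-data side output when a tag is configured. Below is a minimal sketch of the corresponding DataStream API configuration, assuming event-time session windows and an output tag for late elements; the class, method, and tag names are illustrative.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.evictors.CountEvictor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class LateDataSketch {

    // The anonymous subclass keeps the element type information for the side output.
    static final OutputTag<String> LATE = new OutputTag<String>("late-events") {};

    static DataStream<String> lateEvents(DataStream<String> input) {
        SingleOutputStreamOperator<Long> perSession = input
                .keyBy(new KeySelector<String, String>() {
                    @Override
                    public String getKey(String value) {
                        return value;
                    }
                })
                // Session windows use a MergingWindowAssigner, so the merging branch above is taken.
                .window(EventTimeSessionWindows.withGap(Time.minutes(10)))
                .allowedLateness(Time.minutes(1))   // consulted by isWindowLate / isElementLate
                .sideOutputLateData(LATE)           // becomes lateDataOutputTag in the operator
                .evictor(CountEvictor.of(100))
                .process(new ProcessWindowFunction<String, Long, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context ctx, Iterable<String> elements, Collector<Long> out) {
                        long count = 0;
                        for (String ignored : elements) {
                            count++;
                        }
                        out.collect(count);
                    }
                });

        // Elements that were not handled by any window and arrived too late end up here.
        return perSession.getSideOutput(LATE);
    }
}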

Example 85 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in the Apache Flink project.

From the class SerializerComparatorTestData, method getOrderedStringTestData.

@SuppressWarnings("unchecked")
static Tuple2<byte[], StreamRecord<String>>[] getOrderedStringTestData() {
    StringSerializer stringSerializer = new StringSerializer();
    DataOutputSerializer outputSerializer = new DataOutputSerializer(64);
    return Stream.of(new String(new byte[] { -1, 0 }), new String(new byte[] { 0, 1 }), "A", "AB", "ABC", "ABCD", "ABCDE", "ABCDEF", "ABCDEFG", "ABCDEFGH").map(str -> {
        try {
            stringSerializer.serialize(str, outputSerializer);
            byte[] copyOfBuffer = outputSerializer.getCopyOfBuffer();
            outputSerializer.clear();
            return Tuple2.of(copyOfBuffer, new StreamRecord<>(str, 0));
        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }).sorted((o1, o2) -> {
        byte[] key0 = o1.f0;
        byte[] key1 = o2.f0;
        int firstLength = key0.length;
        int secondLength = key1.length;
        int minLength = Math.min(firstLength, secondLength);
        for (int i = 0; i < minLength; i++) {
            int cmp = Byte.compare(key0[i], key1[i]);
            if (cmp != 0) {
                return cmp;
            }
        }
        int lengthCmp = Integer.compare(firstLength, secondLength);
        if (lengthCmp != 0) {
            return lengthCmp;
        }
        return Long.compare(o1.f1.getTimestamp(), o2.f1.getTimestamp());
    }).toArray(Tuple2[]::new);
}
Also used: IntStream (java.util.stream.IntStream), IntSerializer (org.apache.flink.api.common.typeutils.base.IntSerializer), Stream (java.util.stream.Stream), StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord), Tuple2 (org.apache.flink.api.java.tuple.Tuple2), DataOutputSerializer (org.apache.flink.core.memory.DataOutputSerializer), IOException (java.io.IOException), StringSerializer (org.apache.flink.api.common.typeutils.base.StringSerializer)
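
The serialized keys produced above can be checked by reading them back with the matching deserializer. Below is a minimal round-trip sketch over the Tuple2 array returned by getOrderedStringTestData(); the class and method names are illustrative.

import java.io.IOException;

import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class SerializedKeyRoundTrip {

    // Reads every serialized key back and checks that it equals the StreamRecord's value.
    static void verify(Tuple2<byte[], StreamRecord<String>>[] data) throws IOException {
        StringSerializer stringSerializer = new StringSerializer();
        for (Tuple2<byte[], StreamRecord<String>> entry : data) {
            DataInputDeserializer in = new DataInputDeserializer(entry.f0);
            String restored = stringSerializer.deserialize(in);
            if (!restored.equals(entry.f1.getValue())) {
                throw new AssertionError("round trip mismatch for " + entry.f1.getValue());
            }
        }
    }
}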

Aggregations

Classes most frequently used together with StreamRecord in the indexed examples (co-occurrence counts):

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 270
Test (org.junit.Test): 212
ArrayList (java.util.ArrayList): 156
List (java.util.List): 151
Event (org.apache.flink.cep.Event): 136
SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition): 61
SubEvent (org.apache.flink.cep.SubEvent): 47
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 45
NFATestHarness (org.apache.flink.cep.utils.NFATestHarness): 39
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 36
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 34
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 31
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 31
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 30
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 28
HashMap (java.util.HashMap): 23
Map (java.util.Map): 23
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 23
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 20
KeySelector (org.apache.flink.api.java.functions.KeySelector): 20