Example 96 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class WindowOperatorTest method testSideOutputDueToLatenessSessionZeroLateness.

@Test
public void testSideOutputDueToLatenessSessionZeroLateness() throws Exception {
    final int gapSize = 3;
    final long lateness = 0;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(EventTimeSessionWindows.withGap(Time.seconds(gapSize)), new TimeWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()), EventTimeTrigger.create(), lateness, lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // this element is emitted to the side output as late; reuse the last timestamp
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 14500L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted("SideOutput was not correct.", sideExpected, (Iterable) sideActual, new Tuple2ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
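
For orientation, here is a minimal DataStream-level sketch of the setup this harness test exercises: event-time session windows with a 3-second gap, zero allowed lateness, and a late-data side output. It is not taken from the test; `input`, the tag name "late-data", and the class name are placeholders, and the reduce function just sums the counts.

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

public class LateSessionSideOutputSketch {

    public static SingleOutputStreamOperator<Tuple2<String, Integer>> sumPerSession(
            DataStream<Tuple2<String, Integer>> input) {

        // anonymous subclass so the element type survives erasure
        final OutputTag<Tuple2<String, Integer>> lateTag =
                new OutputTag<Tuple2<String, Integer>>("late-data") {};

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = input
                .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> value) {
                        return value.f0;
                    }
                })
                .window(EventTimeSessionWindows.withGap(Time.seconds(3)))
                // allowed lateness defaults to 0, matching lateness = 0 in the test
                .sideOutputLateData(lateTag)
                .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> reduce(
                            Tuple2<String, Integer> a, Tuple2<String, Integer> b) {
                        return new Tuple2<>(a.f0, a.f1 + b.f1);
                    }
                });

        // records behind the watermark for an already-fired session end up here
        summed.getSideOutput(lateTag).print();
        return summed;
    }
}

With zero allowed lateness, any element whose session has already fired and been cleaned up is routed to lateTag, which is exactly what the sideExpected queue checks in the test above.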

Example 97 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class WindowOperatorTest method testNotSideOutputDueToLatenessSessionWithHugeLatenessPurgingTrigger.

@Test
public void testNotSideOutputDueToLatenessSessionWithHugeLatenessPurgingTrigger() throws Exception {
    final int gapSize = 3;
    final long lateness = 10000;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(EventTimeSessionWindows.withGap(Time.seconds(gapSize)), new TimeWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()), PurgingTrigger.of(EventTimeTrigger.create()), lateness, lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not go to the side output because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
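
Two things distinguish this test from Example 96: the allowed lateness of 10000 ms keeps window state alive, so the late element at timestamp 10000 merges the already-fired sessions and fires again instead of going to the side output (hence sideActual is null), and the PurgingTrigger wrapper clears the window contents on every firing, which is why the re-fired results carry a count of 1 ("key2-1") rather than an accumulated sum. Below is a conceptual sketch of the purge-on-fire rule, written as a plain helper for illustration rather than Flink's actual PurgingTrigger code.

import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;

public class PurgingSemanticsSketch {

    // conceptual helper, not Flink's PurgingTrigger itself: a FIRE from the
    // wrapped trigger is upgraded to FIRE_AND_PURGE, everything else passes through
    static TriggerResult purgeOnFire(TriggerResult nested) {
        return nested.isFire() ? TriggerResult.FIRE_AND_PURGE : nested;
    }

    public static void main(String[] args) {
        System.out.println(purgeOnFire(TriggerResult.FIRE));     // FIRE_AND_PURGE
        System.out.println(purgeOnFire(TriggerResult.CONTINUE)); // CONTINUE
    }
}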

Example 98 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EvictingWindowOperatorTest method testCountEvictorEvictAfter.

/**
 * Tests CountEvictor evictAfter behavior.
 */
@Test
public void testCountEvictorEvictAfter() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int windowSize = 4;
    final int triggerCount = 2;
    final boolean evictAfter = true;
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), CountEvictor.of(windowSize, evictAfter), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)
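
With evictAfter = true, the CountEvictor runs after the window function, so each firing sees everything buffered so far and only afterwards is state trimmed to the last windowSize elements, which is why later firings for key2 reach a sum of 6 even though retained state is capped at 4 elements. The harness wires the EvictingWindowOperator directly; the following is a hedged DataStream-level sketch of the equivalent configuration (the `input` stream and class name are placeholders, not part of the test).

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.evictors.CountEvictor;
import org.apache.flink.streaming.api.windowing.triggers.CountTrigger;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

public class CountEvictAfterSketch {

    public static DataStream<Tuple2<String, Integer>> sumWithEvictAfter(
            DataStream<Tuple2<String, Integer>> input) {

        return input
                .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> value) {
                        return value.f0;
                    }
                })
                .window(GlobalWindows.create())
                // fire every 2 elements ...
                .trigger(CountTrigger.of(2))
                // ... and only afterwards trim state down to the last 4 elements,
                // so the window function itself still sees everything buffered
                .evictor(CountEvictor.of(4, true))
                .apply(new WindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String, GlobalWindow>() {
                    @Override
                    public void apply(String key, GlobalWindow window,
                                      Iterable<Tuple2<String, Integer>> values,
                                      Collector<Tuple2<String, Integer>> out) {
                        int sum = 0;
                        for (Tuple2<String, Integer> v : values) {
                            sum += v.f1;
                        }
                        out.collect(new Tuple2<>(key, sum));
                    }
                });
    }
}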

Example 99 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EvictingWindowOperatorTest method testTimeEvictorNoTimestamp.

/**
 * Tests the time evictor when the StreamRecords carry no timestamp information. In that
 * case no element is evicted from the window.
 */
@Test
public void testTimeEvictorNoTimestamp() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int triggerCount = 2;
    final boolean evictAfter = true;
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), TimeEvictor.of(Time.seconds(2), evictAfter), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)
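
The TimeEvictor decides what to drop relative to the largest timestamp among the buffered records; when no StreamRecord carries a timestamp there is nothing to measure against, so nothing is evicted and the per-key sums keep growing (2, 4, 6 for key2 above). The following is a conceptual sketch of that rule over a plain list of StreamRecords, illustrating the documented behavior rather than Flink's actual TimeEvictor implementation.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;

public class TimeEvictionSketch {

    static <T> void evictOlderThan(List<StreamRecord<T>> buffer, long windowSizeMillis) {
        long maxTs = Long.MIN_VALUE;
        boolean hasAnyTimestamp = false;
        for (StreamRecord<T> r : buffer) {
            if (r.hasTimestamp()) {
                hasAnyTimestamp = true;
                maxTs = Math.max(maxTs, r.getTimestamp());
            }
        }
        if (!hasAnyTimestamp) {
            return; // nothing to compare against: keep every element
        }
        long cutoff = maxTs - windowSizeMillis;
        Iterator<StreamRecord<T>> it = buffer.iterator();
        while (it.hasNext()) {
            StreamRecord<T> r = it.next();
            if (r.hasTimestamp() && r.getTimestamp() <= cutoff) {
                it.remove();
            }
        }
    }

    public static void main(String[] args) {
        List<StreamRecord<Integer>> buffer = new ArrayList<>();
        buffer.add(new StreamRecord<>(1)); // no timestamp
        buffer.add(new StreamRecord<>(2)); // no timestamp
        evictOlderThan(buffer, 2000L);
        System.out.println(buffer.size()); // 2: nothing was evicted
    }
}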

Example 100 with StreamRecord

use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

the class EvictingWindowOperatorTest method testCountTriggerWithApply.

@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int windowSize = 4;
    final int windowSlide = 2;
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(windowSlide), CountEvictor.of(windowSize), 0, null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // The global window actually ignores these timestamps...
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TypeHint(org.apache.flink.api.common.typeinfo.TypeHint) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) GlobalWindow(org.apache.flink.streaming.api.windowing.windows.GlobalWindow) Test(org.junit.Test)
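
CountTrigger.of(windowSlide) combined with CountEvictor.of(windowSize) on a global window behaves like a count-based sliding window: fire every windowSlide elements over at most the last windowSize elements. As a usage sketch, KeyedStream#countWindow(size, slide) assembles essentially this same combination; `input` and the class name below are placeholders, not part of the test.

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.windows.GlobalWindow;
import org.apache.flink.util.Collector;

public class CountSlideSketch {

    public static DataStream<Tuple2<String, Integer>> sumCountWindow(
            DataStream<Tuple2<String, Integer>> input) {

        return input
                .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> value) {
                        return value.f0;
                    }
                })
                // size 4, slide 2: fire every 2 elements over the last (at most) 4
                .countWindow(4, 2)
                .apply(new WindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String, GlobalWindow>() {
                    @Override
                    public void apply(String key, GlobalWindow window,
                                      Iterable<Tuple2<String, Integer>> values,
                                      Collector<Tuple2<String, Integer>> out) {
                        int sum = 0;
                        for (Tuple2<String, Integer> v : values) {
                            sum += v.f1;
                        }
                        out.collect(new Tuple2<>(key, sum));
                    }
                });
    }
}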

Aggregations

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 270
Test (org.junit.Test): 212
ArrayList (java.util.ArrayList): 156
List (java.util.List): 151
Event (org.apache.flink.cep.Event): 136
SimpleCondition (org.apache.flink.cep.pattern.conditions.SimpleCondition): 61
SubEvent (org.apache.flink.cep.SubEvent): 47
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 45
NFATestHarness (org.apache.flink.cep.utils.NFATestHarness): 39
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 36
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 34
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 31
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 31
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer): 30
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 28
HashMap (java.util.HashMap): 23
Map (java.util.Map): 23
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 23
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 20
KeySelector (org.apache.flink.api.java.functions.KeySelector): 20