Example 71 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

The class WindowOperatorTest, method testNotSideOutputDueToLatenessSessionWithHugeLateness:

@Test
public void testNotSideOutputDueToLatenessSessionWithHugeLateness() throws Exception {
    final int GAP_SIZE = 3;
    final long LATENESS = 10000;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(GAP_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
            EventTimeTrigger.create(),
            LATENESS,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be side output because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    // the maxTimestamp of the merged session is already late,
    // so we get an immediate firing
    expected.add(new StreamRecord<>(new Tuple3<>("key2-7", 1000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-8", 1000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
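
For orientation, the setup that the test wires into the WindowOperator by hand (event-time session windows with a 3-second gap, 10 seconds of allowed lateness, and a side output for dropped late data) would be expressed roughly as follows at the DataStream API level. This is a sketch: the method name, the input stream, the lateTag argument, and the summing ReduceFunction are illustrative placeholders and are not part of the test.

private static SingleOutputStreamOperator<Tuple2<String, Integer>> sumPerSession(
        DataStream<Tuple2<String, Integer>> input,
        OutputTag<Tuple2<String, Integer>> lateTag) {
    // lateTag plays the role of lateOutputTag in the test; at the call site it would be
    // created as: new OutputTag<Tuple2<String, Integer>>("late-data") {}
    return input
            .keyBy(0)
            .window(EventTimeSessionWindows.withGap(Time.seconds(3)))
            // keep window state around for 10 extra seconds so late elements can still merge in
            .allowedLateness(Time.seconds(10))
            // elements whose session is already gone end up in the side output instead
            .sideOutputLateData(lateTag)
            .reduce(new ReduceFunction<Tuple2<String, Integer>>() {
                @Override
                public Tuple2<String, Integer> reduce(Tuple2<String, Integer> a, Tuple2<String, Integer> b) {
                    return new Tuple2<>(a.f0, a.f1 + b.f1);
                }
            });
}

The late stream is then read with getSideOutput(lateTag) on the returned operator, which corresponds to the sideActual queue the test asserts is null because no element is ever dropped.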

Example 72 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

The class WindowOperatorTest, method testNotSideOutputDueToLatenessSessionWithLateness:

@Test
public void testNotSideOutputDueToLatenessSessionWithLateness() throws Exception {
    // same as testSideOutputDueToLatenessSessionWithLateness() but with an accumulating trigger, i.e.
    // one that does not return FIRE_AND_PURGE when firing but just FIRE. The expected
    // results are therefore slightly different.
    final int GAP_SIZE = 3;
    final long LATENESS = 10;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(GAP_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
            EventTimeTrigger.create(),
            LATENESS,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be side output because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // because of the small allowed lateness and because the trigger is accumulating
    // this will be merged into the session (11600-14600) and therefore will not
    // be side output as late
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    // adding ("key2", 1) extended the session to (10000-146000) for which
    // maxTimestamp <= currentWatermark. Therefore, we immediately get a firing
    // with the current version of EventTimeTrigger/EventTimeTriggerAccum
    expected.add(new StreamRecord<>(new Tuple3<>("key2-2", 10000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-3", 10000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertEquals(null, sideActual);
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
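
The first comment above is the key difference from testSideOutputDueToLatenessSessionWithLateness(): here the trigger keeps (accumulates) the window contents when it fires, so the later firing at watermark 20000 still contains the earlier elements. A rough sketch of how the two variants would be chosen at the API level; windowed is an assumed WindowedStream and not part of the test:

// accumulating: EventTimeTrigger returns FIRE, window contents are kept across firings
windowed.trigger(EventTimeTrigger.create());

// purging: PurgingTrigger rewrites every FIRE from the wrapped trigger into
// FIRE_AND_PURGE, so each firing starts again from empty window contents
windowed.trigger(PurgingTrigger.of(EventTimeTrigger.create()));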

Example 73 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

The class PurgingTriggerTest, method testForwarding:

@Test
public void testForwarding() throws Exception {
    Trigger<Object, TimeWindow> mockTrigger = mock(Trigger.class);
    TriggerTestHarness<Object, TimeWindow> testHarness = new TriggerTestHarness<>(PurgingTrigger.of(mockTrigger), new TimeWindow.Serializer());
    when(mockTrigger.onElement(Matchers.anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.CONTINUE);
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    when(mockTrigger.onElement(Matchers.anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    when(mockTrigger.onElement(Matchers.anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE_AND_PURGE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    when(mockTrigger.onElement(Matchers.anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.PURGE);
    assertEquals(TriggerResult.PURGE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    doAnswer(new Answer<TriggerResult>() {

        @Override
        public TriggerResult answer(InvocationOnMock invocation) throws Exception {
            Trigger.TriggerContext context = (Trigger.TriggerContext) invocation.getArguments()[3];
            // register some timers that we can step through to call onEventTime several
            // times in a row
            context.registerEventTimeTimer(1);
            context.registerEventTimeTimer(2);
            context.registerEventTimeTimer(3);
            context.registerEventTimeTimer(4);
            return TriggerResult.CONTINUE;
        }
    }).when(mockTrigger).onElement(Matchers.<Integer>anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext());
    // set up our timers
    testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2));
    assertEquals(4, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    when(mockTrigger.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.CONTINUE);
    assertEquals(TriggerResult.CONTINUE, testHarness.advanceWatermark(1, new TimeWindow(0, 2)));
    when(mockTrigger.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.advanceWatermark(2, new TimeWindow(0, 2)));
    when(mockTrigger.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE_AND_PURGE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.advanceWatermark(3, new TimeWindow(0, 2)));
    when(mockTrigger.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.PURGE);
    assertEquals(TriggerResult.PURGE, testHarness.advanceWatermark(4, new TimeWindow(0, 2)));
    doAnswer(new Answer<TriggerResult>() {

        @Override
        public TriggerResult answer(InvocationOnMock invocation) throws Exception {
            Trigger.TriggerContext context = (Trigger.TriggerContext) invocation.getArguments()[3];
            // register some timers that we can step through to call onEventTime several
            // times in a row
            context.registerProcessingTimeTimer(1);
            context.registerProcessingTimeTimer(2);
            context.registerProcessingTimeTimer(3);
            context.registerProcessingTimeTimer(4);
            return TriggerResult.CONTINUE;
        }
    }).when(mockTrigger).onElement(Matchers.<Integer>anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext());
    // set up our timers
    testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2));
    assertEquals(4, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
    assertEquals(0, testHarness.numEventTimeTimers(new TimeWindow(0, 2)));
    when(mockTrigger.onProcessingTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.CONTINUE);
    assertEquals(TriggerResult.CONTINUE, testHarness.advanceProcessingTime(1, new TimeWindow(0, 2)));
    when(mockTrigger.onProcessingTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.advanceProcessingTime(2, new TimeWindow(0, 2)));
    when(mockTrigger.onProcessingTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE_AND_PURGE);
    assertEquals(TriggerResult.FIRE_AND_PURGE, testHarness.advanceProcessingTime(3, new TimeWindow(0, 2)));
    when(mockTrigger.onProcessingTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.PURGE);
    assertEquals(TriggerResult.PURGE, testHarness.advanceProcessingTime(4, new TimeWindow(0, 2)));
    testHarness.mergeWindows(new TimeWindow(0, 2), Collections.singletonList(new TimeWindow(0, 1)));
    verify(mockTrigger, times(1)).onMerge(anyTimeWindow(), anyOnMergeContext());
    testHarness.clearTriggerState(new TimeWindow(0, 2));
    verify(mockTrigger, times(1)).clear(eq(new TimeWindow(0, 2)), anyTriggerContext());
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) WindowOperatorContractTest.anyTimeWindow(org.apache.flink.streaming.runtime.operators.windowing.WindowOperatorContractTest.anyTimeWindow) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) PurgingTrigger(org.apache.flink.streaming.api.windowing.triggers.PurgingTrigger) Trigger(org.apache.flink.streaming.api.windowing.triggers.Trigger) InvocationOnMock(org.mockito.invocation.InvocationOnMock) WindowOperatorContractTest.anyTriggerContext(org.apache.flink.streaming.runtime.operators.windowing.WindowOperatorContractTest.anyTriggerContext) TriggerResult(org.apache.flink.streaming.api.windowing.triggers.TriggerResult) Test(org.junit.Test)
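
The assertions in this test reduce to a single forwarding rule: PurgingTrigger upgrades a FIRE from the wrapped trigger to FIRE_AND_PURGE and passes every other TriggerResult through unchanged, for onElement, onEventTime and onProcessingTime alike. A simplified, hypothetical helper that captures that mapping (not the actual Flink implementation):

static TriggerResult forwardThroughPurging(TriggerResult nested) {
    // CONTINUE, PURGE and FIRE_AND_PURGE pass through; only FIRE is upgraded
    return nested == TriggerResult.FIRE ? TriggerResult.FIRE_AND_PURGE : nested;
}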

Example 74 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

The class EvictingWindowOperatorTest, method testTimeEvictorEvictBefore:

/**
 * Tests TimeEvictor evictBefore behavior.
 *
 * @throws Exception
 */
@Test
public void testTimeEvictorEvictBefore() throws Exception {
    AtomicInteger closeCalled = new AtomicInteger(0);
    final int TRIGGER_COUNT = 2;
    final int WINDOW_SIZE = 4;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    @SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(inputType.createSerializer(new ExecutionConfig()));
    ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
    EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new EvictingWindowOperator<>(
            TumblingEventTimeWindows.of(Time.of(WINDOW_SIZE, TimeUnit.SECONDS)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>(closeCalled)),
            CountTrigger.of(TRIGGER_COUNT),
            TimeEvictor.of(Time.seconds(2)),
            0,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 5999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 2001));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1001));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 3999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 6500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1002));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 7999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 3999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
    testHarness.close();
    Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) StreamElementSerializer(org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Test(org.junit.Test)
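
Expressed against the DataStream API, the operator built above corresponds roughly to the sketch below: a tumbling 4-second event-time window, a count trigger that fires every 2 elements, and a TimeEvictor that, before the window function runs, drops elements older than 2 seconds relative to the newest element in the window. This is a sketch; SumWindowFunction is a hypothetical stand-in for the test's RichSumReducer and input is a placeholder stream.

private static DataStream<Tuple2<String, Integer>> evictedSums(DataStream<Tuple2<String, Integer>> input) {
    return input
            .keyBy(0)
            .window(TumblingEventTimeWindows.of(Time.seconds(4)))
            .trigger(CountTrigger.of(2))
            // the single-argument of() evicts before the window function is applied,
            // which is the behavior this test verifies
            .evictor(TimeEvictor.of(Time.seconds(2)))
            .apply(new SumWindowFunction());
}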

Example 75 with StreamRecord

Use of org.apache.flink.streaming.runtime.streamrecord.StreamRecord in project flink by apache.

The class ProcessingTimeTriggerTest, method testClear:

/**
 * Verify that clear() does not leak across windows.
 */
@Test
public void testClear() throws Exception {
    TriggerTestHarness<Object, TimeWindow> testHarness = new TriggerTestHarness<>(ProcessingTimeTrigger.create(), new TimeWindow.Serializer());
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(0, 2)));
    assertEquals(TriggerResult.CONTINUE, testHarness.processElement(new StreamRecord<Object>(1), new TimeWindow(2, 4)));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numEventTimeTimers());
    assertEquals(2, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));
    testHarness.clearTriggerState(new TimeWindow(2, 4));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numEventTimeTimers());
    assertEquals(1, testHarness.numProcessingTimeTimers());
    assertEquals(1, testHarness.numProcessingTimeTimers(new TimeWindow(0, 2)));
    assertEquals(0, testHarness.numProcessingTimeTimers(new TimeWindow(2, 4)));
    testHarness.clearTriggerState(new TimeWindow(0, 2));
    assertEquals(0, testHarness.numStateEntries());
    assertEquals(0, testHarness.numEventTimeTimers());
    assertEquals(0, testHarness.numProcessingTimeTimers());
}
Also used : StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) Test(org.junit.Test)
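
For context, ProcessingTimeTrigger registers a processing-time timer at each window's max timestamp, which is why the counts above go from 2 to 1 to 0 as trigger state is cleared window by window. It is also the default trigger of the processing-time window assigners, so setting it explicitly is redundant but harmless, as in this sketch with a placeholder input stream:

private static DataStream<Tuple2<String, Integer>> processingTimeSums(DataStream<Tuple2<String, Integer>> input) {
    return input
            .keyBy(0)
            .window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
            // redundant: ProcessingTimeTrigger is already the default for this assigner
            .trigger(ProcessingTimeTrigger.create())
            .sum(1);
}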

Aggregations

StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord) 76
Test (org.junit.Test) 50
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor) 27
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer) 27
StreamElementSerializer (org.apache.flink.streaming.runtime.streamrecord.StreamElementSerializer) 27
Tuple2 (org.apache.flink.api.java.tuple.Tuple2) 21
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig) 20
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow) 19
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) 19
Watermark (org.apache.flink.streaming.api.watermark.Watermark) 17
RichFunction (org.apache.flink.api.common.functions.RichFunction) 16
ArrayList (java.util.ArrayList) 14
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue) 14
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 14
Map (java.util.Map) 11
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor) 11
Event (org.apache.flink.cep.Event) 11
HashMap (java.util.HashMap) 10
PublicEvolving (org.apache.flink.annotation.PublicEvolving) 9
MergingWindowAssigner (org.apache.flink.streaming.api.windowing.assigners.MergingWindowAssigner) 9