use of org.apache.flink.streaming.runtime.tasks.OperatorStateHandles in project flink by apache.
the class WindowOperatorTest method testTumblingEventTimeWindows.
private void testTumblingEventTimeWindows(OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness) throws Exception {
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(999));
expectedOutput.add(new Watermark(999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(1999));
expectedOutput.add(new Watermark(1999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
// do a snapshot, close and restore again
OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processWatermark(new Watermark(2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
expectedOutput.add(new Watermark(2999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(3999));
expectedOutput.add(new Watermark(3999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(4999));
expectedOutput.add(new Watermark(4999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(5999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
expectedOutput.add(new Watermark(5999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
// these watermarks don't fire any more windows; they are only forwarded to the output
testHarness.processWatermark(new Watermark(6999));
testHarness.processWatermark(new Watermark(7999));
expectedOutput.add(new Watermark(6999));
expectedOutput.add(new Watermark(7999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
}
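The harness passed into testTumblingEventTimeWindows is built by its callers rather than inside the method. A minimal sketch of one possible construction is shown below; the 3-second tumbling window size, the reducing state with SumReducer, and the PassThroughWindowFunction are assumptions inferred from the expected output and from the testCountTrigger setup further down, not the verbatim caller code.
// Hedged sketch: one way to construct the harness handed to testTumblingEventTimeWindows(...)
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
        TumblingEventTimeWindows.of(Time.seconds(3)),  // window size inferred from the 2999/5999 firings
        new TimeWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
        EventTimeTrigger.create(),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testTumblingEventTimeWindows(testHarness);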
use of org.apache.flink.streaming.runtime.tasks.OperatorStateHandles in project flink by apache.
the class WindowOperatorTest method testPointSessions.
/**
 * This tests a custom Session window assigner that assigns some elements to "point windows",
 * windows that have the same timestamp for start and end.
 *
 * <p>In this test, elements that have 33 as the second tuple field will be put into a point
 * window.
 */
@Test
@SuppressWarnings("unchecked")
public void testPointSessions() throws Exception {
closeCalled.set(0);
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
        new PointSessionWindows(3000),
        new TimeWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalIterableWindowFunction<>(new SessionWindowFunction()),
        EventTimeTrigger.create(),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 1000));
// do a snapshot, close and restore again
OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 2500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 33), 2500));
testHarness.processWatermark(new Watermark(12000));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-36", 10L, 4000L), 3999));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-67", 0L, 3000L), 2999));
expectedOutput.add(new Watermark(12000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
testHarness.close();
}
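The custom PointSessionWindows(3000) assigner used above is not part of this snippet. Based on the Javadoc, a sketch of what such an assigner could look like follows; subclassing EventTimeSessionWindows and the exact constructor/field names are assumptions rather than the verbatim class from WindowOperatorTest.
// Hedged sketch: elements whose second field is 33 get a zero-length "point window"
// (start == end); all other elements get a regular session window with the given gap.
private static class PointSessionWindows extends EventTimeSessionWindows {

    private PointSessionWindows(long sessionTimeout) {
        super(sessionTimeout);
    }

    @Override
    @SuppressWarnings("unchecked")
    public Collection<TimeWindow> assignWindows(Object element, long timestamp, WindowAssignerContext context) {
        if (element instanceof Tuple2 && ((Tuple2<String, Integer>) element).f1 == 33) {
            return Collections.singletonList(new TimeWindow(timestamp, timestamp));
        }
        return Collections.singletonList(new TimeWindow(timestamp, timestamp + sessionTimeout));
    }
}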
use of org.apache.flink.streaming.runtime.tasks.OperatorStateHandles in project flink by apache.
the class WindowOperatorTest method testCountTrigger.
@Test
@SuppressWarnings("unchecked")
public void testCountTrigger() throws Exception {
closeCalled.set(0);
final int WINDOW_SIZE = 4;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new WindowOperator<>(
        GlobalWindows.create(),
        new GlobalWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
        PurgingTrigger.of(CountTrigger.of(WINDOW_SIZE)),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// The global window actually ignores these timestamps...
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
// do a snapshot, close and restore again
OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
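// capture what was emitted before the restore: "key2" already received its fourth element
// above, so the purging count trigger fired once and ("key2", 4) is part of this output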
ConcurrentLinkedQueue<Object> outputBeforeClose = testHarness.getOutput();
stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
operator = new WindowOperator<>(
        GlobalWindows.create(),
        new GlobalWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
        PurgingTrigger.of(CountTrigger.of(WINDOW_SIZE)),
        0,
        null);
testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, Iterables.concat(outputBeforeClose, testHarness.getOutput()), new Tuple2ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, Iterables.concat(outputBeforeClose, testHarness.getOutput()), new Tuple2ResultSortComparator());
testHarness.close();
}
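The SumReducer and TupleKeySelector helpers referenced in these examples are defined elsewhere in WindowOperatorTest. Minimal sketches consistent with the behavior seen above (keying on the String field, summing the Integer field) might look as follows; they are assumptions, not the exact original classes.
// Hedged sketch: key records by their first tuple field.
private static class TupleKeySelector implements KeySelector<Tuple2<String, Integer>, String> {
    @Override
    public String getKey(Tuple2<String, Integer> value) throws Exception {
        return value.f0;
    }
}

// Hedged sketch: sum the second tuple field, keeping the key of the latest element.
private static class SumReducer implements ReduceFunction<Tuple2<String, Integer>> {
    @Override
    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
        return new Tuple2<>(value2.f0, value1.f1 + value2.f1);
    }
}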
use of org.apache.flink.streaming.runtime.tasks.OperatorStateHandles in project flink by apache.
the class WindowOperatorTest method testSessionWindowsWithContinuousEventTimeTrigger.
/**
* This tests whether merging works correctly with the ContinuousEventTimeTrigger.
* @throws Exception
*/
@Test
@SuppressWarnings("unchecked")
public void testSessionWindowsWithContinuousEventTimeTrigger() throws Exception {
closeCalled.set(0);
final int SESSION_SIZE = 3;
TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
        EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
        new TimeWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalIterableWindowFunction<>(new SessionWindowFunction()),
        ContinuousEventTimeTrigger.of(Time.seconds(2)),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// add elements out-of-order and first trigger time is 2000
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 1500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
// triggers emit and next trigger time is 4000
testHarness.processWatermark(new Watermark(2500));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-1", 1500L, 4500L), 4499));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
expectedOutput.add(new Watermark(2500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 4000));
testHarness.processWatermark(new Watermark(3000));
expectedOutput.add(new Watermark(3000));
// do a snapshot, close and restore again
OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 4000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));
// triggers emit and next trigger time is 6000
testHarness.processWatermark(new Watermark(4000));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-3", 1500L, 7000L), 6999));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-15", 0L, 7000L), 6999));
expectedOutput.add(new Watermark(4000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
testHarness.close();
}
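The SessionWindowFunction used by the session-window tests emits results such as ("key2-15", 0L, 7000L), i.e. the key concatenated with the sum of the values, plus the window bounds. A sketch consistent with that output is shown below as an assumption about its shape.
// Hedged sketch: emit "<key>-<sum of values>" together with the window start and end.
private static class SessionWindowFunction implements WindowFunction<Tuple2<String, Integer>, Tuple3<String, Long, Long>, String, TimeWindow> {
    @Override
    public void apply(String key, TimeWindow window, Iterable<Tuple2<String, Integer>> values, Collector<Tuple3<String, Long, Long>> out) throws Exception {
        int sum = 0;
        for (Tuple2<String, Integer> value : values) {
            sum += value.f1;
        }
        out.collect(new Tuple3<>(key + "-" + sum, window.getStart(), window.getEnd()));
    }
}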
use of org.apache.flink.streaming.runtime.tasks.OperatorStateHandles in project flink by apache.
the class WindowOperatorContractTest method testStateSnapshotAndRestore.
@Test
public void testStateSnapshotAndRestore() throws Exception {
MergingWindowAssigner<Integer, TimeWindow> mockAssigner = mockMergingAssigner();
Trigger<Integer, TimeWindow> mockTrigger = mockTrigger();
InternalWindowFunction<Iterable<Integer>, Void, Integer, TimeWindow> mockWindowFunction = mockWindowFunction();
KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Void> testHarness = createWindowOperator(mockAssigner, mockTrigger, 0L, intListDescriptor, mockWindowFunction);
testHarness.open();
when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext())).thenReturn(Arrays.asList(new TimeWindow(2, 4), new TimeWindow(0, 2)));
assertEquals(0, testHarness.getOutput().size());
assertEquals(0, testHarness.numKeyedStateEntries());
doAnswer(new Answer<TriggerResult>() {
@Override
public TriggerResult answer(InvocationOnMock invocation) throws Exception {
Trigger.TriggerContext context = (Trigger.TriggerContext) invocation.getArguments()[3];
context.registerEventTimeTimer(0L);
context.getPartitionedState(valueStateDescriptor).update("hello");
return TriggerResult.CONTINUE;
}
}).when(mockTrigger).onElement(Matchers.<Integer>anyObject(), anyLong(), anyTimeWindow(), anyTriggerContext());
shouldFireAndPurgeOnEventTime(mockTrigger);
testHarness.processElement(new StreamRecord<>(0, 0L));
// window-contents and trigger state for two windows plus merging window set
assertEquals(5, testHarness.numKeyedStateEntries());
// timers/gc timers for two windows
assertEquals(4, testHarness.numEventTimeTimers());
OperatorStateHandles snapshot = testHarness.snapshot(0, 0);
// restore
mockAssigner = mockMergingAssigner();
mockTrigger = mockTrigger();
doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Exception {
Trigger.TriggerContext context = (Trigger.TriggerContext) invocation.getArguments()[1];
context.deleteEventTimeTimer(0L);
context.getPartitionedState(valueStateDescriptor).clear();
return null;
}
}).when(mockTrigger).clear(anyTimeWindow(), anyTriggerContext());
// only fire on the timestamp==0L timers, not the gc timers
when(mockTrigger.onEventTime(eq(0L), Matchers.<TimeWindow>any(), anyTriggerContext())).thenReturn(TriggerResult.FIRE);
mockWindowFunction = mockWindowFunction();
testHarness = createWindowOperator(mockAssigner, mockTrigger, 0L, intListDescriptor, mockWindowFunction);
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
assertEquals(0, testHarness.extractOutputStreamRecords().size());
// verify that we still have all the state/timers/merging window set
assertEquals(5, testHarness.numKeyedStateEntries());
// timers/gc timers for two windows
assertEquals(4, testHarness.numEventTimeTimers());
verify(mockTrigger, never()).clear(anyTimeWindow(), anyTriggerContext());
testHarness.processWatermark(new Watermark(20L));
verify(mockTrigger, times(2)).clear(anyTimeWindow(), anyTriggerContext());
verify(mockWindowFunction, times(2)).apply(eq(0), anyTimeWindow(), anyIntIterable(), WindowOperatorContractTest.<Void>anyCollector());
verify(mockWindowFunction, times(1)).apply(eq(0), eq(new TimeWindow(0, 2)), intIterable(0), WindowOperatorContractTest.<Void>anyCollector());
verify(mockWindowFunction, times(1)).apply(eq(0), eq(new TimeWindow(2, 4)), intIterable(0), WindowOperatorContractTest.<Void>anyCollector());
// it's also called for the cleanup timers
verify(mockTrigger, times(4)).onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext());
verify(mockTrigger, times(1)).onEventTime(eq(0L), eq(new TimeWindow(0, 2)), anyTriggerContext());
verify(mockTrigger, times(1)).onEventTime(eq(0L), eq(new TimeWindow(2, 4)), anyTriggerContext());
assertEquals(0, testHarness.numKeyedStateEntries());
assertEquals(0, testHarness.numEventTimeTimers());
}
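The helper shouldFireAndPurgeOnEventTime(mockTrigger) used before the snapshot is defined elsewhere in WindowOperatorContractTest. Judging from its name, it is presumably a Mockito stub along the following lines; the exact body is an assumption.
// Hedged sketch: stub the mocked trigger to answer FIRE_AND_PURGE for any event-time timer,
// using the anyTimeWindow()/anyTriggerContext() matcher helpers referenced above.
private static <T> void shouldFireAndPurgeOnEventTime(Trigger<T, TimeWindow> mockTrigger) throws Exception {
    when(mockTrigger.onEventTime(anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE_AND_PURGE);
}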