Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
The class WindowOperatorTest, method testCleanupTimerWithEmptyReduceStateForTumblingWindows.
@Test
public void testCleanupTimerWithEmptyReduceStateForTumblingWindows() throws Exception {
final int windowSize = 2;
final long lateness = 1;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
                "window-contents",
                new SumReducer(),
                STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
        new WindowOperator<>(
                TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
                new TimeWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(
                        new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
                EventTimeTrigger.create(),
                lateness,
                null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
// normal element
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(1599));
testHarness.processWatermark(new Watermark(1999));
testHarness.processWatermark(new Watermark(2000));
testHarness.processWatermark(new Watermark(5000));
expected.add(new Watermark(1599));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
// here it fires and purges
expected.add(new Watermark(1999));
// here is the cleanup timer
expected.add(new Watermark(2000));
expected.add(new Watermark(5000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.close();
}
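The SumReducer referenced by the state descriptor above is defined elsewhere in WindowOperatorTest; the following is only a minimal sketch, assuming it simply sums the Integer field of two tuples that share the same key, which matches the expected counts in these tests.
// Sketch of a SumReducer consistent with its usage above (not necessarily the exact class).
// Requires import org.apache.flink.api.common.functions.ReduceFunction.
private static class SumReducer implements ReduceFunction<Tuple2<String, Integer>> {

    private static final long serialVersionUID = 1L;

    @Override
    public Tuple2<String, Integer> reduce(
            Tuple2<String, Integer> value1,
            Tuple2<String, Integer> value2) throws Exception {
        // Keep the key and add the counts; the ReducingState applies this incrementally
        // as elements are added to the window contents.
        return new Tuple2<>(value2.f0, value1.f1 + value2.f1);
    }
}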
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
The class WindowOperatorTest, method testLateness.
@Test
public void testLateness() throws Exception {
final int windowSize = 2;
final long lateness = 500;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
                "window-contents",
                new SumReducer(),
                STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
        new WindowOperator<>(
                TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
                new TimeWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(
                        new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
                PurgingTrigger.of(EventTimeTrigger.create()),
                lateness,
                lateOutputTag);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
ConcurrentLinkedQueue<Object> lateExpected = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 500));
testHarness.processWatermark(new Watermark(1500));
expected.add(new Watermark(1500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1300));
testHarness.processWatermark(new Watermark(2300));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
expected.add(new Watermark(2300));
// this will not be side output because window.maxTimestamp() + allowedLateness > currentWatermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1997));
testHarness.processWatermark(new Watermark(6000));
// this is 1 and not 3 because the trigger fires and purges
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
expected.add(new Watermark(6000));
// this will be side output because window.maxTimestamp() + allowedLateness < currentWatermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processWatermark(new Watermark(7000));
lateExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
expected.add(new Watermark(7000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
TestHarnessUtil.assertOutputEqualsSorted("SideOutput was not correct.", lateExpected, (Iterable) testHarness.getSideOutput(lateOutputTag), new Tuple2ResultSortComparator());
testHarness.close();
}
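The lateOutputTag that receives dropped-late records is a field of the test class; below is only a hedged sketch of how such an OutputTag is typically declared (the tag name here is illustrative, not necessarily the one used in WindowOperatorTest). The anonymous subclass is what lets Flink capture the Tuple2 type information.
// Illustrative side-output tag for late elements; the actual field in WindowOperatorTest may differ.
// Requires import org.apache.flink.util.OutputTag.
private static final OutputTag<Tuple2<String, Integer>> lateOutputTag =
        new OutputTag<Tuple2<String, Integer>>("late-elements") {};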
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
The class WindowOperatorTest, method testNotSideOutputDueToLatenessSessionWithHugeLatenessPurgingTrigger.
@Test
public void testNotSideOutputDueToLatenessSessionWithHugeLatenessPurgingTrigger() throws Exception {
final int gapSize = 3;
final long lateness = 10000;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
                "window-contents",
                new SumReducer(),
                STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator =
        new WindowOperator<>(
                EventTimeSessionWindows.withGap(Time.seconds(gapSize)),
                new TimeWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
                PurgingTrigger.of(EventTimeTrigger.create()),
                lateness,
                lateOutputTag);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(1999));
expected.add(new Watermark(1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
testHarness.processWatermark(new Watermark(4998));
expected.add(new Watermark(4998));
// this will not be side output because the session we're adding to has a maxTimestamp
// after the current watermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
// new session
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
testHarness.processWatermark(new Watermark(7400));
expected.add(new Watermark(7400));
// this will merge the two sessions into one
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
testHarness.processWatermark(new Watermark(11501));
expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
expected.add(new Watermark(11501));
// new session
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
testHarness.processWatermark(new Watermark(14600));
expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
expected.add(new Watermark(14600));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 14600L), 14599));
ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
assertEquals(null, sideActual);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
testHarness.processWatermark(new Watermark(20000));
expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 17500L), 17499));
expected.add(new Watermark(20000));
testHarness.processWatermark(new Watermark(100000));
expected.add(new Watermark(100000));
actual = testHarness.getOutput();
sideActual = testHarness.getSideOutput(lateOutputTag);
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
assertEquals(null, sideActual);
testHarness.close();
}
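The ReducedSessionWindowFunction used above turns the single pre-reduced tuple of each session into a Tuple3 of key-plus-count, session start, and session end. The following is a sketch reconstructed from the expected records in this test, not necessarily the exact class in WindowOperatorTest.
// Sketch of ReducedSessionWindowFunction consistent with the expected output above.
// Requires imports org.apache.flink.streaming.api.functions.windowing.WindowFunction
// and org.apache.flink.util.Collector.
private static class ReducedSessionWindowFunction
        implements WindowFunction<Tuple2<String, Integer>, Tuple3<String, Long, Long>, String, TimeWindow> {

    private static final long serialVersionUID = 1L;

    @Override
    public void apply(
            String key,
            TimeWindow window,
            Iterable<Tuple2<String, Integer>> values,
            Collector<Tuple3<String, Long, Long>> out) {
        // The reducing state hands a single pre-aggregated tuple per session to this function.
        for (Tuple2<String, Integer> value : values) {
            out.collect(new Tuple3<>(key + "-" + value.f1, window.getStart(), window.getEnd()));
        }
    }
}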
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
The class WindowOperatorTest, method testProcessingTimeSlidingWindows.
@Test
public void testProcessingTimeSlidingWindows() throws Throwable {
final int windowSize = 3;
final int windowSlide = 1;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
                "window-contents",
                new SumReducer(),
                STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
        new WindowOperator<>(
                SlidingProcessingTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS), Time.of(windowSlide, TimeUnit.SECONDS)),
                new TimeWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(
                        new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
                ProcessingTimeTrigger.create(),
                0,
                null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// timestamp is ignored in processing time
testHarness.setProcessingTime(3);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
testHarness.setProcessingTime(1000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
testHarness.setProcessingTime(2000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 1999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
testHarness.setProcessingTime(3000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 2999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
testHarness.setProcessingTime(7000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 5), 3999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 5), 4999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.close();
}
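The TupleKeySelector passed to every operator above keys the stream by the String field of the input tuple; here is a minimal sketch consistent with that usage.
// Sketch of the TupleKeySelector used above (the test class defines its own version).
// Requires import org.apache.flink.api.java.functions.KeySelector.
private static class TupleKeySelector implements KeySelector<Tuple2<String, Integer>, String> {

    private static final long serialVersionUID = 1L;

    @Override
    public String getKey(Tuple2<String, Integer> value) throws Exception {
        // Key by the String field so window state is scoped per key ("key1", "key2", ...).
        return value.f0;
    }
}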
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
The class WindowOperatorTest, method testSlidingEventTimeWindowsReduce.
@Test
@SuppressWarnings("unchecked")
public void testSlidingEventTimeWindowsReduce() throws Exception {
closeCalled.set(0);
final int windowSize = 3;
final int windowSlide = 1;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
        new ReducingStateDescriptor<>(
                "window-contents",
                new SumReducer(),
                STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator =
        new WindowOperator<>(
                SlidingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS), Time.of(windowSlide, TimeUnit.SECONDS)),
                new TimeWindow.Serializer(),
                new TupleKeySelector(),
                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                stateDesc,
                new InternalSingleValueWindowFunction<>(
                        new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
                EventTimeTrigger.create(),
                0,
                null);
testSlidingEventTimeWindows(operator);
}
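Each test builds its harness with createTestHarness(operator). A hedged sketch of such a helper is shown below, assuming it wraps the operator in a keyed test harness using the same key selector and key type that the operator itself is configured with; the exact helper in WindowOperatorTest may differ in details.
// Sketch of a createTestHarness helper; requires imports
// org.apache.flink.streaming.api.operators.OneInputStreamOperator and
// org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness.
private static <OUT> KeyedOneInputStreamOperatorTestHarness<String, Tuple2<String, Integer>, OUT>
        createTestHarness(OneInputStreamOperator<Tuple2<String, Integer>, OUT> operator) throws Exception {
    // A keyed harness is required so the window operator can access keyed (reducing) state.
    return new KeyedOneInputStreamOperatorTestHarness<>(
            operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
}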