Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
In the class WindowOperatorTest, the method testContinuousWatermarkTrigger:
@Test
@SuppressWarnings("unchecked")
public void testContinuousWatermarkTrigger() throws Exception {
closeCalled.set(0);
final int windowSize = 3;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
        "window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new WindowOperator<>(
        GlobalWindows.create(),
        new GlobalWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, GlobalWindow, Tuple2<String, Integer>>()),
        ContinuousEventTimeTrigger.of(Time.of(windowSize, TimeUnit.SECONDS)),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// The global window actually ignores these timestamps...
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(1000));
expectedOutput.add(new Watermark(1000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(2000));
expectedOutput.add(new Watermark(2000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(3000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), Long.MAX_VALUE));
expectedOutput.add(new Watermark(3000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(4000));
expectedOutput.add(new Watermark(4000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(5000));
expectedOutput.add(new Watermark(5000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(6000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
expectedOutput.add(new Watermark(6000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
// those don't have any effect...
testHarness.processWatermark(new Watermark(7000));
testHarness.processWatermark(new Watermark(8000));
expectedOutput.add(new Watermark(7000));
expectedOutput.add(new Watermark(8000));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.close();
}
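The harness-based test above wires the WindowOperator by hand. For orientation, the sketch below shows a minimal, hypothetical user-facing DataStream pipeline for the same combination: a GlobalWindows assigner (which never fires on its own) driven by a ContinuousEventTimeTrigger, with reduce() backing the window contents with reducing state. The class name, input elements, and reducer lambda are illustrative rather than part of the test, and a real job would also need a timestamp/watermark assigner so that event time advances.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousEventTimeTrigger;

public class ContinuousTriggerSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.fromElements(Tuple2.of("key1", 1), Tuple2.of("key2", 1))
                // key by the String field, mirroring the TupleKeySelector used in the test
                .keyBy(value -> value.f0)
                // GlobalWindows never fires by itself; the trigger below supplies all firing logic
                .window(GlobalWindows.create())
                // fire every 3 seconds of event time, like the test's windowSize
                .trigger(ContinuousEventTimeTrigger.of(Time.seconds(3)))
                // reduce() is what makes the runtime keep the window contents in reducing state
                .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1))
                .print();

        env.execute("continuous-event-time-trigger-sketch");
    }
}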
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
In the class WindowOperatorTest, the method testProcessingTimeSessionWindows:
@Test
public void testProcessingTimeSessionWindows() throws Throwable {
final int windowGap = 3;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
        "window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
        ProcessingTimeSessionWindows.withGap(Time.of(windowGap, TimeUnit.SECONDS)),
        new TimeWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
        ProcessingTimeTrigger.create(),
        0,
        null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// timestamp is ignored in processing time
testHarness.setProcessingTime(3);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1));
testHarness.setProcessingTime(1000);
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1002));
testHarness.setProcessingTime(5000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
testHarness.setProcessingTime(10000);
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 7999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 7999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());
assertEquals(expectedOutput.size(), testHarness.getOutput().size());
for (Object elem : testHarness.getOutput()) {
    if (elem instanceof StreamRecord) {
        StreamRecord<Tuple2<String, Integer>> el = (StreamRecord<Tuple2<String, Integer>>) elem;
        assertTrue(expectedOutput.contains(el));
    }
}
testHarness.close();
}
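The timestamps asserted above follow from how processing-time session windows are laid out: an element seen at processing time t opens a window [t, t + gap), overlapping windows for the same key are merged so the session extends to the latest element plus the gap, and the emitted record carries window.maxTimestamp(), i.e. the session end minus one millisecond (hence 3999 and 7999). As a point of reference, a minimal, hypothetical pipeline using the same assigner through the public API could look like the sketch below; the class name, elements, and reducer lambda are illustrative.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class ProcessingTimeSessionSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.fromElements(Tuple2.of("key1", 1), Tuple2.of("key2", 1))
                .keyBy(value -> value.f0)
                // a session closes after 3 seconds of processing-time inactivity per key,
                // matching the windowGap used in the test
                .window(ProcessingTimeSessionWindows.withGap(Time.seconds(3)))
                // the incremental sum is held in reducing state, one accumulator per session window
                .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1))
                .print();

        env.execute("processing-time-session-sketch");
    }
}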
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
In the class WindowOperatorTest, the method testSideOutputDueToLatenessTumbling:
@Test
public void testSideOutputDueToLatenessTumbling() throws Exception {
final int windowSize = 2;
final long lateness = 0;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
        "window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
        TumblingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS)),
        new TimeWindow.Serializer(),
        new TupleKeySelector(),
        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
        stateDesc,
        new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
        EventTimeTrigger.create(),
        lateness,
        lateOutputTag);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = createTestHarness(operator);
testHarness.open();
ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
// normal element
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(1985));
expected.add(new Watermark(1985));
// this will not be dropped because window.maxTimestamp() + allowedLateness > currentWatermark
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1980));
testHarness.processWatermark(new Watermark(1999));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
expected.add(new Watermark(1999));
// emitted to the late-data side output; it keeps its original timestamp since the input tuple is forwarded unchanged
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2001));
testHarness.processWatermark(new Watermark(2999));
expected.add(new Watermark(2999));
testHarness.processWatermark(new Watermark(3999));
expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
expected.add(new Watermark(3999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, testHarness.getOutput(), new Tuple2ResultSortComparator());
TestHarnessUtil.assertOutputEqualsSorted("SideOutput was not correct.", sideExpected, (Iterable) testHarness.getSideOutput(lateOutputTag), new Tuple2ResultSortComparator());
testHarness.close();
}
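In application code the same late-data behaviour is configured on the WindowedStream rather than by passing lateOutputTag to the operator constructor. Below is a minimal, hypothetical sketch; the tag name, elements, and reducer are illustrative, and a real job would also assign timestamps and watermarks so the windows can actually fire.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

public class LateSideOutputSketch {
    // anonymous subclass so the element type survives erasure
    private static final OutputTag<Tuple2<String, Integer>> LATE =
            new OutputTag<Tuple2<String, Integer>>("late-data") {};

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<Tuple2<String, Integer>> input =
                env.fromElements(Tuple2.of("key2", 1), Tuple2.of("key2", 1));

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = input
                .keyBy(value -> value.f0)
                .window(TumblingEventTimeWindows.of(Time.seconds(2)))
                // zero allowed lateness: anything behind the watermark goes straight to the side output
                .allowedLateness(Time.seconds(0))
                .sideOutputLateData(LATE)
                .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1));

        summed.print();
        // late elements keep their original timestamp, as the test asserts
        summed.getSideOutput(LATE).print();

        env.execute("late-side-output-sketch");
    }
}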
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
In the class WindowOperatorContractTest, the method testStateTypeIsConsistentFromWindowStateAndGlobalState:
@Test
public void testStateTypeIsConsistentFromWindowStateAndGlobalState() throws Exception {
class NoOpAggregateFunction implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return null;
    }

    @Override
    public String add(String value, String accumulator) {
        return null;
    }

    @Override
    public String getResult(String accumulator) {
        return null;
    }

    @Override
    public String merge(String a, String b) {
        return null;
    }
}
WindowAssigner<Integer, TimeWindow> mockAssigner = mockTimeWindowAssigner();
Trigger<Integer, TimeWindow> mockTrigger = mockTrigger();
InternalWindowFunction<Iterable<Integer>, Void, Integer, TimeWindow> mockWindowFunction = mockWindowFunction();
KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Void> testHarness = createWindowOperator(mockAssigner, mockTrigger, 20L, mockWindowFunction);
testHarness.open();
when(mockTrigger.onElement(anyInt(), anyLong(), anyTimeWindow(), anyTriggerContext())).thenReturn(TriggerResult.FIRE);
when(mockAssigner.assignWindows(anyInt(), anyLong(), anyAssignerContext())).thenReturn(Arrays.asList(new TimeWindow(0, 20)));
AtomicBoolean processWasInvoked = new AtomicBoolean(false);
doAnswer(new Answer<Object>() {
    @Override
    public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
        InternalWindowFunction.InternalWindowContext context = (InternalWindowFunction.InternalWindowContext) invocationOnMock.getArguments()[2];
        KeyedStateStore windowKeyedStateStore = context.windowState();
        KeyedStateStore globalKeyedStateStore = context.globalState();

        // for every state kind, windowState() and globalState() must hand out the same implementation class

        // list state
        ListStateDescriptor<String> windowListStateDescriptor = new ListStateDescriptor<String>("windowListState", String.class);
        ListStateDescriptor<String> globalListStateDescriptor = new ListStateDescriptor<String>("globalListState", String.class);
        assertEquals(windowKeyedStateStore.getListState(windowListStateDescriptor).getClass(), globalKeyedStateStore.getListState(globalListStateDescriptor).getClass());

        // value state
        ValueStateDescriptor<String> windowValueStateDescriptor = new ValueStateDescriptor<String>("windowValueState", String.class);
        ValueStateDescriptor<String> globalValueStateDescriptor = new ValueStateDescriptor<String>("globalValueState", String.class);
        assertEquals(windowKeyedStateStore.getState(windowValueStateDescriptor).getClass(), globalKeyedStateStore.getState(globalValueStateDescriptor).getClass());

        // aggregating state
        AggregatingStateDescriptor<String, String, String> windowAggStateDesc = new AggregatingStateDescriptor<String, String, String>("windowAgg", new NoOpAggregateFunction(), String.class);
        AggregatingStateDescriptor<String, String, String> globalAggStateDesc = new AggregatingStateDescriptor<String, String, String>("globalAgg", new NoOpAggregateFunction(), String.class);
        assertEquals(windowKeyedStateStore.getAggregatingState(windowAggStateDesc).getClass(), globalKeyedStateStore.getAggregatingState(globalAggStateDesc).getClass());

        // reducing state
        ReducingStateDescriptor<String> windowReducingStateDesc = new ReducingStateDescriptor<String>("windowReducing", (a, b) -> a, String.class);
        ReducingStateDescriptor<String> globalReducingStateDesc = new ReducingStateDescriptor<String>("globalReducing", (a, b) -> a, String.class);
        assertEquals(windowKeyedStateStore.getReducingState(windowReducingStateDesc).getClass(), globalKeyedStateStore.getReducingState(globalReducingStateDesc).getClass());

        // map state
        MapStateDescriptor<String, String> windowMapStateDescriptor = new MapStateDescriptor<String, String>("windowMapState", String.class, String.class);
        MapStateDescriptor<String, String> globalMapStateDescriptor = new MapStateDescriptor<String, String>("globalMapState", String.class, String.class);
        assertEquals(windowKeyedStateStore.getMapState(windowMapStateDescriptor).getClass(), globalKeyedStateStore.getMapState(globalMapStateDescriptor).getClass());

        processWasInvoked.set(true);
        return null;
    }
}).when(mockWindowFunction).process(anyInt(), anyTimeWindow(), anyInternalWindowContext(), anyIntIterable(), WindowOperatorContractTest.<Void>anyCollector());
testHarness.processElement(new StreamRecord<>(0, 0L));
assertTrue(processWasInvoked.get());
}
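The mock-based test only verifies that windowState() and globalState() hand out the same state implementation classes. In application code the same two scopes are reached through ProcessWindowFunction.Context; the hypothetical function below keeps one ReducingState count per key-and-window and one per key across all windows (the class, descriptor names, and output format are illustrative, not part of the test):

import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class CountingWindowFunction
        extends ProcessWindowFunction<Tuple2<String, Integer>, String, String, TimeWindow> {

    private final ReducingStateDescriptor<Long> perWindowCount =
            new ReducingStateDescriptor<>("perWindowCount", (a, b) -> a + b, Long.class);
    private final ReducingStateDescriptor<Long> perKeyCount =
            new ReducingStateDescriptor<>("perKeyCount", (a, b) -> a + b, Long.class);

    @Override
    public void process(String key, Context ctx,
            Iterable<Tuple2<String, Integer>> elements, Collector<String> out) throws Exception {
        ReducingState<Long> windowCount = ctx.windowState().getReducingState(perWindowCount);
        ReducingState<Long> globalCount = ctx.globalState().getReducingState(perKeyCount);
        for (Tuple2<String, Integer> ignored : elements) {
            windowCount.add(1L);  // scoped to this key and this window
            globalCount.add(1L);  // scoped to this key only, shared across windows
        }
        out.collect(key + ": window=" + windowCount.get() + ", total=" + globalCount.get());
    }
}

Per-window state like this is mainly useful when a trigger can fire the same window several times; it should be cleaned up in the function's clear() method once the window is purged, whereas globalState() outlives individual windows. The function would be attached with keyBy(...).window(...).process(new CountingWindowFunction()).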
Use of org.apache.flink.api.common.state.ReducingStateDescriptor in project flink by apache.
In the class AllWindowTranslationTest, the method testReduceWithProcessWindowFunctionProcessingTime:
@Test
@SuppressWarnings("rawtypes")
public void testReduceWithProcessWindowFunctionProcessingTime() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));
DataStream<Tuple3<String, String, Integer>> window = source
        .windowAll(TumblingProcessingTimeWindows.of(Time.of(1, TimeUnit.SECONDS)))
        .reduce(new DummyReducer(), new ProcessAllWindowFunction<Tuple2<String, Integer>, Tuple3<String, String, Integer>, TimeWindow>() {

            private static final long serialVersionUID = 1L;

            @Override
            public void process(Context ctx, Iterable<Tuple2<String, Integer>> values, Collector<Tuple3<String, String, Integer>> out) throws Exception {
                for (Tuple2<String, Integer> in : values) {
                    out.collect(new Tuple3<>(in.f0, in.f0, in.f1));
                }
            }
        });
OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform = (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window.getTransformation();
OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
Assert.assertTrue(operator instanceof WindowOperator);
WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator = (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
Assert.assertTrue(winOperator.getTrigger() instanceof ProcessingTimeTrigger);
Assert.assertTrue(winOperator.getWindowAssigner() instanceof TumblingProcessingTimeWindows);
Assert.assertTrue(winOperator.getStateDescriptor() instanceof ReducingStateDescriptor);
processElementAndEnsureOutput(operator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
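The translation test above uses the non-keyed windowAll() variant with DummyReducer; the assertion is that reduce(reducer, windowFunction) makes the resulting WindowOperator store its contents in a ReducingStateDescriptor. For comparison, here is a minimal, hypothetical keyed sketch of the same reduce-plus-ProcessWindowFunction pattern; the reducer lambda, Tuple3 layout, and class name are illustrative. The ReduceFunction pre-aggregates in reducing state, so the window function only ever sees the single reduced element plus window metadata.

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class KeyedReduceWithProcessFunctionSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<Tuple2<String, Integer>> source =
                env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

        source
                .keyBy(value -> value.f0)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(1)))
                .reduce(
                        // pre-aggregation held in reducing state, one value per key and window
                        (a, b) -> Tuple2.of(a.f0, a.f1 + b.f1),
                        new ProcessWindowFunction<Tuple2<String, Integer>, Tuple3<String, Long, Integer>, String, TimeWindow>() {
                            @Override
                            public void process(String key, Context ctx,
                                    Iterable<Tuple2<String, Integer>> reduced,
                                    Collector<Tuple3<String, Long, Integer>> out) {
                                // "reduced" contains exactly one pre-aggregated element
                                for (Tuple2<String, Integer> in : reduced) {
                                    out.collect(Tuple3.of(key, ctx.window().getEnd(), in.f1));
                                }
                            }
                        })
                .print();

        env.execute("reduce-with-process-window-function-sketch");
    }
}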