Use of org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness in project flink by apache.
The class StreamGroupedFoldTest, method testOpenClose.
@Test
public void testOpenClose() throws Exception {
    KeySelector<Integer, Integer> keySelector = new KeySelector<Integer, Integer>() {
        @Override
        public Integer getKey(Integer value) {
            return value;
        }
    };

    StreamGroupedFold<Integer, String, Integer> operator = new StreamGroupedFold<>(new TestOpenCloseFoldFunction(), "init");
    operator.setOutputType(BasicTypeInfo.STRING_TYPE_INFO, new ExecutionConfig());

    OneInputStreamOperatorTestHarness<Integer, String> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

    long initialTime = 0L;

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(1, initialTime));
    testHarness.processElement(new StreamRecord<>(2, initialTime));

    testHarness.close();

    assertTrue("RichFunction methods were not called.", TestOpenCloseFoldFunction.closeCalled);
    assertTrue("Output contains no elements.", testHarness.getOutput().size() > 0);
}
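The TestOpenCloseFoldFunction referenced above is defined elsewhere in StreamGroupedFoldTest. A minimal sketch of what such a helper could look like, assuming a RichFoldFunction that records its open()/close() calls in static flags (the names and details are hypothetical and may differ from the real class):

import org.apache.flink.api.common.functions.RichFoldFunction;
import org.apache.flink.configuration.Configuration;

// Hypothetical sketch; the real helper lives in StreamGroupedFoldTest.
private static class TestOpenCloseFoldFunction extends RichFoldFunction<Integer, String> {

    public static volatile boolean openCalled = false;
    public static volatile boolean closeCalled = false;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        openCalled = true;
    }

    @Override
    public void close() throws Exception {
        super.close();
        closeCalled = true;
    }

    @Override
    public String fold(String accumulator, Integer value) throws Exception {
        // fail fast if the operator never opened the rich function
        if (!openCalled) {
            throw new IllegalStateException("open() was not called before fold()");
        }
        return accumulator + value;
    }
}

With such a helper, the assertion on TestOpenCloseFoldFunction.closeCalled verifies that the operator drives the RichFunction lifecycle.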
Use of org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness in project flink by apache.
The class StreamGroupedReduceTest, method testGroupedReduce.
@Test
public void testGroupedReduce() throws Exception {
    KeySelector<Integer, Integer> keySelector = new IntegerKeySelector();

    StreamGroupedReduce<Integer> operator = new StreamGroupedReduce<>(new MyReducer(), IntSerializer.INSTANCE);

    OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, keySelector, BasicTypeInfo.INT_TYPE_INFO);

    long initialTime = 0L;
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<>(1, initialTime + 2));
    testHarness.processWatermark(new Watermark(initialTime + 2));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 3));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 4));
    testHarness.processElement(new StreamRecord<>(3, initialTime + 5));

    // the operator emits the running reduce result per key and forwards the watermark
    expectedOutput.add(new StreamRecord<>(1, initialTime + 1));
    expectedOutput.add(new StreamRecord<>(2, initialTime + 2));
    expectedOutput.add(new Watermark(initialTime + 2));
    expectedOutput.add(new StreamRecord<>(2, initialTime + 3));
    expectedOutput.add(new StreamRecord<>(4, initialTime + 4));
    expectedOutput.add(new StreamRecord<>(3, initialTime + 5));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
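IntegerKeySelector and MyReducer are helpers from StreamGroupedReduceTest. Plausible sketches, assuming an identity key selector and a summing ReduceFunction, which is what the running-sum values in expectedOutput suggest (both names and bodies are assumptions):

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;

// Hypothetical sketches of the helpers referenced above.
private static class IntegerKeySelector implements KeySelector<Integer, Integer> {
    @Override
    public Integer getKey(Integer value) throws Exception {
        // identity key: each distinct value forms its own key group
        return value;
    }
}

private static class MyReducer implements ReduceFunction<Integer> {
    @Override
    public Integer reduce(Integer value1, Integer value2) throws Exception {
        // running sum per key, matching the expected records (1, 2) for key 1 and (2, 4) for key 2
        return value1 + value2;
    }
}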
Use of org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness in project flink by apache.
The class KeyedProcessOperatorTest, method testEventTimeTimers.
@Test
public void testEventTimeTimers() throws Exception {
    KeyedProcessOperator<Integer, Integer, Integer> operator =
            new KeyedProcessOperator<>(new TriggeringFlatMapFunction(TimeDomain.EVENT_TIME));

    OneInputStreamOperatorTestHarness<Integer, Integer> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new IdentityKeySelector<Integer>(), BasicTypeInfo.INT_TYPE_INFO);

    testHarness.setup();
    testHarness.open();

    testHarness.processWatermark(new Watermark(0));
    testHarness.processElement(new StreamRecord<>(17, 42L));
    testHarness.processWatermark(new Watermark(5));

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    // the element is forwarded as-is; the event-time timer it registers fires at watermark 5 and emits 1777
    expectedOutput.add(new Watermark(0L));
    expectedOutput.add(new StreamRecord<>(17, 42L));
    expectedOutput.add(new StreamRecord<>(1777, 5L));
    expectedOutput.add(new Watermark(5L));

    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());

    testHarness.close();
}
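TriggeringFlatMapFunction and IdentityKeySelector are defined elsewhere in KeyedProcessOperatorTest. A sketch of what they could look like in the Flink version this test targets, assuming a ProcessFunction that registers a timer for time 5 and emits 1777 when it fires (names, the marker values, and the exact API generation are assumptions inferred from the expected output):

import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.TimeDomain;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

// Hypothetical sketches; the real helpers live in KeyedProcessOperatorTest.
private static class IdentityKeySelector<T> implements KeySelector<T, T> {
    @Override
    public T getKey(T value) throws Exception {
        return value;
    }
}

private static class TriggeringFlatMapFunction extends ProcessFunction<Integer, Integer> {

    private final TimeDomain expectedTimeDomain;

    TriggeringFlatMapFunction(TimeDomain timeDomain) {
        this.expectedTimeDomain = timeDomain;
    }

    @Override
    public void processElement(Integer value, Context ctx, Collector<Integer> out) throws Exception {
        // forward the element and register a timer for time 5 in the configured domain
        out.collect(value);
        if (expectedTimeDomain == TimeDomain.EVENT_TIME) {
            ctx.timerService().registerEventTimeTimer(5);
        } else {
            ctx.timerService().registerProcessingTimeTimer(5);
        }
    }

    @Override
    public void onTimer(long timestamp, OnTimerContext ctx, Collector<Integer> out) throws Exception {
        // fires once the watermark passes 5 and emits the marker value the test checks for
        out.collect(1777);
    }
}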
Use of org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness in project flink by apache.
The class WindowOperatorMigrationTest, method testRestoreReducingEventTimeWindowsFromFlink11.
@Test
@SuppressWarnings("unchecked")
public void testRestoreReducingEventTimeWindowsFromFlink11() throws Exception {
    final int WINDOW_SIZE = 3;

    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");

    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
            "window-contents",
            new SumReducer(),
            inputType.createSerializer(new ExecutionConfig()));

    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
            TumblingEventTimeWindows.of(Time.of(WINDOW_SIZE, TimeUnit.SECONDS)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new PassThroughWindowFunction<String, TimeWindow, Tuple2<String, Integer>>()),
            EventTimeTrigger.create(),
            0,
            null);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    // The commented-out block below was run against Flink 1.1 to generate the legacy snapshot that is restored further down.
    /*
    operator.setInputType(TypeInfoParser.<Tuple2<String, Integer>>parse("Tuple2<String, Integer>"), new ExecutionConfig());
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
            new OneInputStreamOperatorTestHarness<>(operator);
    testHarness.configureForKeyedStream(new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setup();
    testHarness.open();

    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));

    testHarness.processWatermark(new Watermark(999));
    expectedOutput.add(new Watermark(999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

    testHarness.processWatermark(new Watermark(1999));
    expectedOutput.add(new Watermark(1999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

    // do snapshot and save to file
    StreamTaskState snapshot = testHarness.snapshot(0L, 0L);
    testHarness.snaphotToFile(snapshot, "src/test/resources/win-op-migration-test-reduce-event-time-flink1.1-snapshot");
    testHarness.close();
    */

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    testHarness.setup();
    testHarness.initializeStateFromLegacyCheckpoint(getResourceFilename("win-op-migration-test-reduce-event-time-flink1.1-snapshot"));
    testHarness.open();

    testHarness.processWatermark(new Watermark(2999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
    expectedOutput.add(new Watermark(2999));

    testHarness.processWatermark(new Watermark(3999));
    expectedOutput.add(new Watermark(3999));

    testHarness.processWatermark(new Watermark(4999));
    expectedOutput.add(new Watermark(4999));

    testHarness.processWatermark(new Watermark(5999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
    expectedOutput.add(new Watermark(5999));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple2ResultSortComparator());

    testHarness.close();
}
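TupleKeySelector and SumReducer come from the surrounding migration test class. Plausible sketches, assuming the key is the String field f0 and the reducer sums the Integer field f1, which matches the restored window results such as ("key1", 3) above (both implementations are assumptions):

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;

// Hypothetical sketches of the helpers referenced above.
private static class TupleKeySelector implements KeySelector<Tuple2<String, Integer>, String> {
    @Override
    public String getKey(Tuple2<String, Integer> value) throws Exception {
        // key the stream by the String field
        return value.f0;
    }
}

private static class SumReducer implements ReduceFunction<Tuple2<String, Integer>> {
    @Override
    public Tuple2<String, Integer> reduce(Tuple2<String, Integer> value1, Tuple2<String, Integer> value2) throws Exception {
        // keep the key, sum the counts: three "key1" elements with count 1 become ("key1", 3)
        return new Tuple2<>(value1.f0, value1.f1 + value2.f1);
    }
}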
Use of org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness in project flink by apache.
The class WindowOperatorTest, method testSessionWindowsWithProcessFunction.
@Test
@SuppressWarnings("unchecked")
public void testSessionWindowsWithProcessFunction() throws Exception {
    closeCalled.set(0);

    final int SESSION_SIZE = 3;

    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");

    ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>(
            "window-contents",
            inputType.createSerializer(new ExecutionConfig()));

    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalIterableProcessWindowFunction<>(new SessionProcessWindowFunction()),
            EventTimeTrigger.create(),
            0,
            null);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);

    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

    testHarness.open();

    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));

    testHarness.processWatermark(new Watermark(12000));

    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
    expectedOutput.add(new Watermark(12000));

    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));

    testHarness.processWatermark(new Watermark(17999));

    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
    expectedOutput.add(new Watermark(17999));

    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());

    testHarness.close();
}
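SessionProcessWindowFunction is defined elsewhere in WindowOperatorTest. A plausible sketch, assuming it concatenates the key with the sum of the window's values and emits the session window's start and end, which would produce results such as ("key2-6", 0L, 5500L) above (the implementation shown is an assumption):

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

// Hypothetical sketch; the real class may differ in detail.
private static class SessionProcessWindowFunction
        extends ProcessWindowFunction<Tuple2<String, Integer>, Tuple3<String, Long, Long>, String, TimeWindow> {

    @Override
    public void process(String key, Context context, Iterable<Tuple2<String, Integer>> elements, Collector<Tuple3<String, Long, Long>> out) throws Exception {
        int sum = 0;
        for (Tuple2<String, Integer> element : elements) {
            sum += element.f1;
        }
        // emit "<key>-<sum>" together with the merged session window's start and end timestamps
        out.collect(new Tuple3<>(key + "-" + sum, context.window().getStart(), context.window().getEnd()));
    }
}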