Use of org.apache.flink.streaming.api.windowing.windows.GlobalWindow in project flink by apache.
From class EvictingWindowOperatorTest, method testCountEvictorEvictAfter.
/**
 * Tests the evictAfter behavior of CountEvictor.
 */
@Test
public void testCountEvictorEvictAfter() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int windowSize = 4;
final int triggerCount = 2;
final boolean evictAfter = true;
@SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), CountEvictor.of(windowSize, evictAfter), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
long initialTime = 0L;
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
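For context, the same components the test exercises can be wired on a keyed DataStream. The following is a minimal illustrative sketch, not taken from the test: the source elements, variable names, and job name are assumptions made for the example, and imports are omitted as in the snippets above.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.fromElements(
                Tuple2.of("key1", 1), Tuple2.of("key1", 1),
                Tuple2.of("key2", 1), Tuple2.of("key2", 1))
        .returns(Types.TUPLE(Types.STRING, Types.INT))
        .keyBy(t -> t.f0)
        .window(GlobalWindows.create())
        .trigger(CountTrigger.of(2))        // fire every two elements per key
        .evictor(CountEvictor.of(4, true))  // keep at most four elements, evict after the window function runs
        .reduce((a, b) -> Tuple2.of(a.f0, a.f1 + b.f1))
        .print();
env.execute("count-evictor-sketch");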
Use of org.apache.flink.streaming.api.windowing.windows.GlobalWindow in project flink by apache.
From class EvictingWindowOperatorTest, method testTimeEvictorNoTimestamp.
/**
 * Tests TimeEvictor when the StreamRecords carry no timestamp information.
 * In that case no element is evicted from the window.
 */
@Test
public void testTimeEvictorNoTimestamp() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int triggerCount = 2;
final boolean evictAfter = true;
@SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), TimeEvictor.of(Time.seconds(2), evictAfter), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1)));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1)));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 6), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
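The no-eviction outcome follows from how time-based eviction is defined: records are compared against the largest timestamp found in the buffer, so records without timestamps cannot be evicted. The helper below is a simplified, illustrative sketch of that rule, not the actual TimeEvictor source; the method name is made up for the example.
static <T> void evictOldRecords(List<TimestampedValue<T>> records, long windowSizeMillis) {
    long maxTimestamp = Long.MIN_VALUE;
    boolean hasTimestamp = false;
    for (TimestampedValue<T> record : records) {
        if (record.hasTimestamp()) {
            hasTimestamp = true;
            maxTimestamp = Math.max(maxTimestamp, record.getTimestamp());
        }
    }
    if (!hasTimestamp) {
        // No record carries a timestamp: nothing can be evicted.
        // This is the case the test above verifies.
        return;
    }
    long evictionBoundary = maxTimestamp - windowSizeMillis;
    records.removeIf(record -> record.hasTimestamp() && record.getTimestamp() <= evictionBoundary);
}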
Use of org.apache.flink.streaming.api.windowing.windows.GlobalWindow in project flink by apache.
From class EvictingWindowOperatorTest, method testCountTriggerWithApply.
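/**
 * Tests CountTrigger combined with CountEvictor and an apply-style (iterable) window function.
 */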
@Test
@SuppressWarnings("unchecked")
public void testCountTriggerWithApply() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int windowSize = 4;
final int windowSlide = 2;
@SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(windowSlide), CountEvictor.of(windowSize), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
long initialTime = 0L;
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// The global window actually ignores these timestamps...
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 10999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
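The assembly used in this test (GlobalWindows plus CountTrigger.of(windowSlide) and CountEvictor.of(windowSize)) is what a count-based sliding window looks like at the operator level. As a rough sketch of the equivalent DataStream API usage, where keyedStream is an assumed KeyedStream<Tuple2<String, Integer>, String> and not part of the test:
keyedStream
        .countWindow(4, 2) // window over the last 4 elements, evaluated every 2 elements
        .apply(new WindowFunction<Tuple2<String, Integer>, Tuple2<String, Integer>, String, GlobalWindow>() {
            @Override
            public void apply(String key, GlobalWindow window,
                              Iterable<Tuple2<String, Integer>> values,
                              Collector<Tuple2<String, Integer>> out) {
                int sum = 0;
                for (Tuple2<String, Integer> value : values) {
                    sum += value.f1;
                }
                out.collect(Tuple2.of(key, sum));
            }
        });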
Use of org.apache.flink.streaming.api.windowing.windows.GlobalWindow in project flink by apache.
From class EvictingWindowOperatorTest, method testDeltaEvictorEvictAfter.
/**
 * Tests the evictAfter behavior of DeltaEvictor.
 */
@Test
public void testDeltaEvictorEvictAfter() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int triggerCount = 2;
final boolean evictAfter = true;
final int threshold = 2;
@SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), DeltaEvictor.of(threshold, new DeltaFunction<Tuple2<String, Integer>>() {
@Override
public double getDelta(Tuple2<String, Integer> oldDataPoint, Tuple2<String, Integer> newDataPoint) {
return newDataPoint.f1 - oldDataPoint.f1;
}
}, evictAfter), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
long initialTime = 0L;
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), initialTime + 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 5), initialTime + 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), initialTime + 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), initialTime + 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 15), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 9), initialTime + 10999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 16), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 22), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
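The expected sums reflect the evictAfter semantics: the window function runs on the full buffer first and the evictor prunes it afterwards. Assuming DeltaEvictor drops every element whose delta to the most recent element reaches the threshold, the first firing for key2 sums 1 + 4 = 5 and only then evicts the 1 (delta 4 - 1 = 3 >= 2); the second firing therefore sees 4, 5, and 6 and emits 15, after which the 4 is evicted (6 - 4 = 2 >= 2), leaving 5 and 6 for the final firing (5 + 6 + 1 + 10 = 22).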
Use of org.apache.flink.streaming.api.windowing.windows.GlobalWindow in project flink by apache.
From class EvictingWindowOperatorTest, method testDeltaEvictorEvictBefore.
/**
 * Tests the evictBefore behavior of DeltaEvictor.
 */
@Test
public void testDeltaEvictorEvictBefore() throws Exception {
AtomicInteger closeCalled = new AtomicInteger(0);
final int triggerCount = 2;
final boolean evictAfter = false;
final int threshold = 2;
@SuppressWarnings({ "unchecked", "rawtypes" }) TypeSerializer<StreamRecord<Tuple2<String, Integer>>> streamRecordSerializer = (TypeSerializer<StreamRecord<Tuple2<String, Integer>>>) new StreamElementSerializer(STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
ListStateDescriptor<StreamRecord<Tuple2<String, Integer>>> stateDesc = new ListStateDescriptor<>("window-contents", streamRecordSerializer);
EvictingWindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, GlobalWindow> operator = new EvictingWindowOperator<>(GlobalWindows.create(), new GlobalWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalIterableWindowFunction<>(new RichSumReducer<GlobalWindow>(closeCalled)), CountTrigger.of(triggerCount), DeltaEvictor.of(threshold, new DeltaFunction<Tuple2<String, Integer>>() {
@Override
public double getDelta(Tuple2<String, Integer> oldDataPoint, Tuple2<String, Integer> newDataPoint) {
return newDataPoint.f1 - oldDataPoint.f1;
}
}, evictAfter), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
long initialTime = 0L;
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), initialTime + 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime + 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), initialTime));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 5), initialTime + 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), initialTime + 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), initialTime + 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 11), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), initialTime + 10999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), initialTime + 1000));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 8), Long.MAX_VALUE));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 10), Long.MAX_VALUE));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new ResultSortComparator());
testHarness.close();
Assert.assertEquals("Close was not called.", 1, closeCalled.get());
}
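For contrast with the previous test, evictBefore applies the same delta rule before the window function runs: at the first firing for key2 the 1 is already evicted (4 - 1 = 3 >= 2), so only the 4 is summed; at the second firing the 4 is evicted against the new last element 6, leaving 5 + 6 = 11; key1's two 1s survive both checks and sum to 2, matching the expected output above. This walkthrough assumes the evictor compares each buffered element against the most recent one, as in the evictAfter variant.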