
Example 6 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testDropDueToLatenessSessionWithLatenessPurgingTrigger.

@Test
public void testDropDueToLatenessSessionWithLatenessPurgingTrigger() throws Exception {
    // this has the same output as testSideOutputDueToLatenessSessionZeroLateness() because
    // the allowed lateness is too small to make a difference
    final int GAP_SIZE = 3;
    final long LATENESS = 10;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(GAP_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
            PurgingTrigger.of(EventTimeTrigger.create()),
            LATENESS,
            lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has a maxTimestamp
    // that is after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 10000L, 14600L), 14599));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 10000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
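
Across these examples the pattern is the same: the expected records and watermarks are collected in a ConcurrentLinkedQueue (an unbounded, thread-safe, non-blocking FIFO queue), the harness output is fetched with testHarness.getOutput() (also a ConcurrentLinkedQueue<Object>), and the two are compared order-insensitively with TestHarnessUtil.assertOutputEqualsSorted. The sketch below illustrates that order-insensitive comparison idea in plain Java; it is only an illustration, not TestHarnessUtil's actual implementation, and the helper name assertEqualsSorted is invented for this example.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

public class SortedQueueComparison {

    // Copy both queues, sort the copies with the given comparator, and compare
    // element-wise, so the check ignores the order in which records were emitted.
    static <T> void assertEqualsSorted(ConcurrentLinkedQueue<T> expected,
                                       ConcurrentLinkedQueue<T> actual,
                                       Comparator<? super T> comparator) {
        List<T> sortedExpected = new ArrayList<>(expected);
        List<T> sortedActual = new ArrayList<>(actual);
        sortedExpected.sort(comparator);
        sortedActual.sort(comparator);
        if (!sortedExpected.equals(sortedActual)) {
            throw new AssertionError(
                    "Output was not correct. expected=" + sortedExpected + " actual=" + sortedActual);
        }
    }

    public static void main(String[] args) {
        ConcurrentLinkedQueue<String> expected = new ConcurrentLinkedQueue<>();
        expected.add("b");
        expected.add("a");

        ConcurrentLinkedQueue<String> actual = new ConcurrentLinkedQueue<>();
        actual.add("a");
        actual.add("b");

        // Passes: both queues hold the same elements, just in a different order.
        assertEqualsSorted(expected, actual, Comparator.naturalOrder());
    }
}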

Example 7 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testReduceSessionWindows.

@Test
@SuppressWarnings("unchecked")
public void testReduceSessionWindows() throws Exception {
    closeCalled.set(0);
    final int SESSION_SIZE = 3;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
            EventTimeTrigger.create(),
            0,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
    testHarness.processWatermark(new Watermark(12000));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
    expectedOutput.add(new Watermark(12000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
    testHarness.processWatermark(new Watermark(17999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
    expectedOutput.add(new Watermark(17999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) OperatorStateHandles(org.apache.flink.streaming.runtime.tasks.OperatorStateHandles) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
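
With EventTimeSessionWindows.withGap(Time.seconds(3)), each element initially gets its own window [timestamp, timestamp + 3000), and overlapping windows are merged. That is why key2's records at 0, 1000 and 2500 collapse into one session [0, 5500) and the records at 5501, 6000, 6000 and 6050 into [5501, 9050), matching the Tuple3 results above (the emitted record timestamp is the window's max timestamp, i.e. end - 1). The stand-alone sketch below reproduces that arithmetic in plain Java; it assumes the timestamps are already sorted and is not Flink's MergingWindowSet.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SessionMergeSketch {

    // Merge per-element windows [ts, ts + gap) into session windows.
    // Windows are half-open, so a new window only extends the previous
    // session if it starts strictly before that session's current end.
    static List<long[]> mergeSessions(long[] sortedTimestamps, long gap) {
        List<long[]> sessions = new ArrayList<>();
        for (long ts : sortedTimestamps) {
            long end = ts + gap;
            if (!sessions.isEmpty() && ts < sessions.get(sessions.size() - 1)[1]) {
                long[] last = sessions.get(sessions.size() - 1);
                last[1] = Math.max(last[1], end);
            } else {
                sessions.add(new long[] {ts, end});
            }
        }
        return sessions;
    }

    public static void main(String[] args) {
        // key2 timestamps from the test above, 3-second gap.
        long[] key2 = {0, 1000, 2500, 5501, 6000, 6000, 6050};
        for (long[] session : mergeSessions(key2, 3000)) {
            // Prints [0, 5500] and [5501, 9050]; the end values are exclusive.
            System.out.println(Arrays.toString(session));
        }
    }
}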

Example 8 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testSessionWindowsWithCountTrigger.

/**
	 * This tests whether merging works correctly with the CountTrigger.
	 * @throws Exception
	 */
@Test
@SuppressWarnings("unchecked")
public void testSessionWindowsWithCountTrigger() throws Exception {
    closeCalled.set(0);
    final int SESSION_SIZE = 3;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents", inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalIterableWindowFunction<>(new SessionWindowFunction()),
            PurgingTrigger.of(CountTrigger.of(4)),
            0,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 6500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 7000));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-10", 0L, 6500L), 6499));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    // add an element that merges the two "key1" sessions; they should now have count 6 and therefore fire
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 10), 4500));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-22", 10L, 10000L), 9999L));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ListStateDescriptor(org.apache.flink.api.common.state.ListStateDescriptor) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) OperatorStateHandles(org.apache.flink.streaming.runtime.tasks.OperatorStateHandles) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Test(org.junit.Test)
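
Here the trigger is PurgingTrigger.of(CountTrigger.of(4)): a window fires once it has buffered four elements and its contents are purged afterwards, which is why key2's first four records produce the single result "key2-10" and why the merged "key1" session only fires after the merge pushes its element count past the threshold. As a rough analogy (plain Java, not Flink's Trigger API; class and method names are invented for illustration), a fire-and-purge counting buffer looks like this:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

public class CountAndPurgeSketch {

    private final ConcurrentLinkedQueue<Integer> buffer = new ConcurrentLinkedQueue<>();
    private final int threshold;

    CountAndPurgeSketch(int threshold) {
        this.threshold = threshold;
    }

    // Buffer the element; once `threshold` elements are buffered, emit their sum
    // and clear (purge) the buffer, mimicking fire-and-purge behaviour.
    void add(int value) {
        buffer.add(value);
        if (buffer.size() >= threshold) {
            List<Integer> window = new ArrayList<>(buffer);
            buffer.clear();
            int sum = window.stream().mapToInt(Integer::intValue).sum();
            System.out.println("fired with " + window.size() + " elements, sum=" + sum);
        }
    }

    public static void main(String[] args) {
        CountAndPurgeSketch sketch = new CountAndPurgeSketch(4);
        for (int value : new int[] {1, 2, 3, 4, 1, 2}) {
            sketch.add(value); // fires exactly once, after the fourth element (sum=10)
        }
    }
}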

Example 9 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testCleanupTimerWithEmptyFoldingStateForSessionWindows.

// TODO this test seems invalid, as it uses the unsupported combination of merging windows and folding window state
@Test
public void testCleanupTimerWithEmptyFoldingStateForSessionWindows() throws Exception {
    final int GAP_SIZE = 3;
    final long LATENESS = 10;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    FoldingStateDescriptor<Tuple2<String, Integer>, Tuple2<String, Integer>> windowStateDesc = new FoldingStateDescriptor<>("window-contents", new Tuple2<>((String) null, 0), new FoldFunction<Tuple2<String, Integer>, Tuple2<String, Integer>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple2<String, Integer> fold(Tuple2<String, Integer> accumulator, Tuple2<String, Integer> value) throws Exception {
            return new Tuple2<>(value.f0, accumulator.f1 + value.f1);
        }
    }, inputType);
    windowStateDesc.initializeSerializerUnlessSet(new ExecutionConfig());
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(GAP_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            windowStateDesc,
            new InternalSingleValueWindowFunction<>(new PassThroughFunction()),
            EventTimeTrigger.create(),
            LATENESS,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
    expected.add(new Watermark(4998));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new Watermark(14600));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) FoldingStateDescriptor(org.apache.flink.api.common.state.FoldingStateDescriptor) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
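
The single element at timestamp 1000 opens the session window [1000, 4000); the watermark 4998 passes its max timestamp 3999, so the window fires (the record at 3999 above), and with LATENESS = 10 the window state is kept only until the cleanup time 3999 + 10 = 4009. By the watermark 14600 the state is gone, which is what this cleanup-timer test checks. A small sketch of that arithmetic, under the assumption that the event-time cleanup time is the window's max timestamp plus the allowed lateness (this is not WindowOperator's actual code):

public class CleanupTimeSketch {

    // Assumed rule: in event time, a window's state is cleaned up at
    // maxTimestamp + allowedLateness, guarding against long overflow.
    static long cleanupTime(long windowEnd, long allowedLateness) {
        long maxTimestamp = windowEnd - 1;
        long cleanup = maxTimestamp + allowedLateness;
        return cleanup >= maxTimestamp ? cleanup : Long.MAX_VALUE;
    }

    public static void main(String[] args) {
        // Session of the single element at t=1000 with a 3-second gap: window [1000, 4000).
        long windowEnd = 1000 + 3000;
        System.out.println(cleanupTime(windowEnd, 10)); // 4009, well before the watermark 14600
    }
}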

Example 10 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testReduceSessionWindowsWithProcessFunction.

@Test
@SuppressWarnings("unchecked")
public void testReduceSessionWindowsWithProcessFunction() throws Exception {
    closeCalled.set(0);
    final int SESSION_SIZE = 3;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(SESSION_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueProcessWindowFunction<>(new ReducedProcessSessionWindowFunction()),
            EventTimeTrigger.create(),
            0,
            null);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    // do a snapshot, close and restore again
    OperatorStateHandles snapshot = testHarness.snapshot(0L, 0L);
    testHarness.close();
    testHarness.setup();
    testHarness.initializeState(snapshot);
    testHarness.open();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
    testHarness.processWatermark(new Watermark(12000));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
    expectedOutput.add(new Watermark(12000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
    testHarness.processWatermark(new Watermark(17999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
    expectedOutput.add(new Watermark(17999));
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) OperatorStateHandles(org.apache.flink.streaming.runtime.tasks.OperatorStateHandles) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
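
This last example only differs from testReduceSessionWindows in that the pre-aggregated value is handed to a ProcessWindowFunction (via InternalSingleValueProcessWindowFunction) rather than a plain WindowFunction; the expected output is identical. In both tests the ReducingStateDescriptor keeps a single running Tuple2 per key and window, combined by the test's SumReducer helper, which presumably keeps the key and adds the Integer fields. A plain-Java sketch of that reduction (names invented, not the test's actual SumReducer):

import java.util.AbstractMap.SimpleEntry;
import java.util.Map.Entry;
import java.util.function.BinaryOperator;

public class SumReducerSketch {

    // Combine two (key, count) pairs by keeping the key and adding the counts.
    // Flink applies such a reduce function incrementally, so the window state
    // holds one running pair instead of every element.
    static final BinaryOperator<Entry<String, Integer>> SUM =
            (a, b) -> new SimpleEntry<>(a.getKey(), a.getValue() + b.getValue());

    public static void main(String[] args) {
        Entry<String, Integer> acc = new SimpleEntry<>("key2", 10);
        acc = SUM.apply(acc, new SimpleEntry<>("key2", 20));
        System.out.println(acc.getKey() + "-" + acc.getValue()); // key2-30, as in the test's last session
    }
}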

Aggregations

ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 236
Test (org.junit.Test): 102
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 56
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 52
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 43
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 40
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 40
CountDownLatch (java.util.concurrent.CountDownLatch): 37
ArrayList (java.util.ArrayList): 31
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 28
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 18
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 17
IOException (java.io.IOException): 15
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 15
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 14
ExecutionException (java.util.concurrent.ExecutionException): 13
ExecutorService (java.util.concurrent.ExecutorService): 13
Map (java.util.Map): 12
OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles): 12
Iterator (java.util.Iterator): 11