
Example 16 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class WindowOperatorTest, method testSideOutputDueToLatenessSessionZeroLateness.

@Test
public void testSideOutputDueToLatenessSessionZeroLateness() throws Exception {
    final int GAP_SIZE = 3;
    final long LATENESS = 0;
    TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>(
            "window-contents", new SumReducer(), inputType.createSerializer(new ExecutionConfig()));
    WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(
            EventTimeSessionWindows.withGap(Time.seconds(GAP_SIZE)),
            new TimeWindow.Serializer(),
            new TupleKeySelector(),
            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
            stateDesc,
            new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()),
            EventTimeTrigger.create(),
            LATENESS,
            lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(
            operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has a maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // this is side output as late; reuse the last timestamp
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 14500L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted("SideOutput was not correct.", sideExpected, (Iterable) sideActual, new Tuple2ResultSortComparator());
    testHarness.close();
}
Also used : ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) ReducingStateDescriptor(org.apache.flink.api.common.state.ReducingStateDescriptor) StreamRecord(org.apache.flink.streaming.runtime.streamrecord.StreamRecord) TimeWindow(org.apache.flink.streaming.api.windowing.windows.TimeWindow) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Tuple2(org.apache.flink.api.java.tuple.Tuple2) Tuple3(org.apache.flink.api.java.tuple.Tuple3) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Watermark(org.apache.flink.streaming.api.watermark.Watermark) Test(org.junit.Test)
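
All of these examples accumulate expected and actual test output in ConcurrentLinkedQueue instances. As a brief, Flink-independent refresher on the JDK class itself, here is a minimal sketch of its core operations (class and variable names are illustrative only):

import java.util.concurrent.ConcurrentLinkedQueue;

public class QueueBasics {
    public static void main(String[] args) {
        // Unbounded, thread-safe, non-blocking FIFO queue.
        ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<>();

        queue.add("a");    // add and offer are equivalent here; the queue is unbounded
        queue.offer("b");

        System.out.println(queue.peek());  // "a" - inspect the head without removing it
        System.out.println(queue.poll());  // "a" - remove and return the head
        System.out.println(queue.poll());  // "b"
        System.out.println(queue.poll());  // null - an empty queue returns null rather than blocking
    }
}

Note that size() is not a constant-time operation and iteration is weakly consistent, which is fine for the assert-at-the-end pattern these tests use.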

Example 17 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class OneInputStreamTaskTest, method testCheckpointBarriers.

/**
	 * This test verifies that checkpoint barriers are correctly forwarded.
	 */
@Test
public void testCheckpointBarriers() throws Exception {
    final OneInputStreamTask<String, String> mapTask = new OneInputStreamTask<String, String>();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<String, String>(mapTask, 2, 2, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<String, String>(new IdentityMap());
    streamConfig.setStreamOperator(mapOperator);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<Object>();
    long initialTime = 0L;
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 0, 0);
    // These elements should be buffered until we receive barriers from
    // all inputs
    testHarness.processElement(new StreamRecord<String>("Hello-0-0", initialTime), 0, 0);
    testHarness.processElement(new StreamRecord<String>("Ciao-0-0", initialTime), 0, 0);
    // These elements should be forwarded, since we did not yet receive a checkpoint barrier
    // on that input. Only add to the same input, otherwise we would not know the ordering
    // of the output, since the Task might read the inputs in any order.
    testHarness.processElement(new StreamRecord<String>("Hello-1-1", initialTime), 1, 1);
    testHarness.processElement(new StreamRecord<String>("Ciao-1-1", initialTime), 1, 1);
    expectedOutput.add(new StreamRecord<String>("Hello-1-1", initialTime));
    expectedOutput.add(new StreamRecord<String>("Ciao-1-1", initialTime));
    testHarness.waitForInputProcessing();
    // we should not yet see the barrier, only the two elements from non-blocked input
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 0, 1);
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 1, 0);
    testHarness.processEvent(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()), 1, 1);
    testHarness.waitForInputProcessing();
    // now we should see the barrier and after that the buffered elements
    expectedOutput.add(new CheckpointBarrier(0, 0, CheckpointOptions.forFullCheckpoint()));
    expectedOutput.add(new StreamRecord<String>("Hello-0-0", initialTime));
    expectedOutput.add(new StreamRecord<String>("Ciao-0-0", initialTime));
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    TestHarnessUtil.assertOutputEquals("Output was not correct.", expectedOutput, testHarness.getOutput());
}
Also used : StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) CheckpointBarrier(org.apache.flink.runtime.io.network.api.CheckpointBarrier) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) StreamMap(org.apache.flink.streaming.api.operators.StreamMap) Test(org.junit.Test)
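
The buffering behavior this test exercises can be pictured as a small alignment routine: once a barrier has arrived on a channel, further records from that channel are parked until barriers have arrived on all channels, and only then are the barrier and the parked records emitted. The sketch below is a hypothetical illustration of that idea, not Flink's actual barrier handler; every class and method name in it is made up:

import java.util.Arrays;
import java.util.concurrent.ConcurrentLinkedQueue;

// Hypothetical illustration of barrier alignment; not Flink's CheckpointBarrierHandler.
class AlignmentSketch {
    private final boolean[] barrierReceived;
    private final ConcurrentLinkedQueue<String> buffered = new ConcurrentLinkedQueue<>();
    private int barriers;

    AlignmentSketch(int numChannels) {
        this.barrierReceived = new boolean[numChannels];
    }

    void onRecord(int channel, String record, ConcurrentLinkedQueue<String> out) {
        if (barrierReceived[channel]) {
            buffered.add(record);   // hold back until alignment completes
        } else {
            out.add(record);        // forward immediately
        }
    }

    void onBarrier(int channel, ConcurrentLinkedQueue<String> out) {
        if (!barrierReceived[channel]) {
            barrierReceived[channel] = true;
            barriers++;
        }
        if (barriers == barrierReceived.length) {
            out.add("BARRIER");     // emit the barrier downstream...
            out.addAll(buffered);   // ...then release the records that were held back
            buffered.clear();
            Arrays.fill(barrierReceived, false);
            barriers = 0;
        }
    }
}

In the test above, Hello-0-0 and Ciao-0-0 play the role of the held-back records while Hello-1-1 and Ciao-1-1 flow through immediately, which is exactly the ordering the two assertOutputEquals calls verify.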

Example 18 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class AsyncWaitOperatorTest, method testOperatorChainWithProcessingTime.

/**
	 * Tests that the AsyncWaitOperator works together with chaining.
	 */
@Test
public void testOperatorChainWithProcessingTime() throws Exception {
    JobVertex chainedVertex = createChainedVertex(false);
    final OneInputStreamTask<Integer, Integer> task = new OneInputStreamTask<>();
    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(task, 1, 1, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    testHarness.taskConfig = chainedVertex.getConfiguration();
    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
    final AsyncWaitOperator<Integer, Integer> headOperator = operatorChainStreamConfig.getStreamOperator(AsyncWaitOperatorTest.class.getClassLoader());
    streamConfig.setStreamOperator(headOperator);
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    long initialTimestamp = 0L;
    testHarness.processElement(new StreamRecord<>(5, initialTimestamp));
    testHarness.processElement(new StreamRecord<>(6, initialTimestamp + 1L));
    testHarness.processElement(new StreamRecord<>(7, initialTimestamp + 2L));
    testHarness.processElement(new StreamRecord<>(8, initialTimestamp + 3L));
    testHarness.processElement(new StreamRecord<>(9, initialTimestamp + 4L));
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(22, initialTimestamp));
    expectedOutput.add(new StreamRecord<>(26, initialTimestamp + 1L));
    expectedOutput.add(new StreamRecord<>(30, initialTimestamp + 2L));
    expectedOutput.add(new StreamRecord<>(34, initialTimestamp + 3L));
    expectedOutput.add(new StreamRecord<>(38, initialTimestamp + 4L));
    TestHarnessUtil.assertOutputEqualsSorted("Test for chained operator with AsyncWaitOperator failed", expectedOutput, testHarness.getOutput(), new StreamRecordComparator());
}
Also used : OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Test(org.junit.Test)
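
createChainedVertex(false) is not shown in this excerpt, so the exact operator chain is unknown; the expected records do, however, all satisfy output = 4 * input + 2 for the inputs 5 through 9, which is the relationship the final assertion effectively checks. A trivial confirmation of the arithmetic:

public class ChainedOutputCheck {
    public static void main(String[] args) {
        // Inferred input-to-output relationship for the expected records above (22, 26, 30, 34, 38).
        for (int x = 5; x <= 9; x++) {
            System.out.println(x + " -> " + (4 * x + 2));
        }
    }
}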

Example 19 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class AsyncWaitOperatorTest, method testStateSnapshotAndRestore.

@Test
public void testStateSnapshotAndRestore() throws Exception {
    final OneInputStreamTask<Integer, Integer> task = new OneInputStreamTask<>();
    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(task, 1, 1, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(new LazyAsyncFunction(), TIMEOUT, 3, AsyncDataStream.OutputMode.ORDERED);
    final StreamConfig streamConfig = testHarness.getStreamConfig();
    streamConfig.setStreamOperator(operator);
    final AcknowledgeStreamMockEnvironment env = new AcknowledgeStreamMockEnvironment(testHarness.jobConfig, testHarness.taskConfig, testHarness.getExecutionConfig(), testHarness.memorySize, new MockInputSplitProvider(), testHarness.bufferSize);
    testHarness.invoke(env);
    testHarness.waitForTaskRunning();
    final long initialTime = 0L;
    testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
    testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
    testHarness.processElement(new StreamRecord<>(4, initialTime + 4));
    testHarness.waitForInputProcessing();
    final long checkpointId = 1L;
    final long checkpointTimestamp = 1L;
    final CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, checkpointTimestamp);
    task.triggerCheckpoint(checkpointMetaData, CheckpointOptions.forFullCheckpoint());
    env.getCheckpointLatch().await();
    assertEquals(checkpointId, env.getCheckpointId());
    LazyAsyncFunction.countDown();
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    // set the operator state from previous attempt into the restored one
    final OneInputStreamTask<Integer, Integer> restoredTask = new OneInputStreamTask<>();
    restoredTask.setInitialState(new TaskStateHandles(env.getCheckpointStateHandles()));
    final OneInputStreamTaskTestHarness<Integer, Integer> restoredTaskHarness = new OneInputStreamTaskTestHarness<>(restoredTask, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    restoredTaskHarness.setupOutputForSingletonOperatorChain();
    AsyncWaitOperator<Integer, Integer> restoredOperator = new AsyncWaitOperator<>(new MyAsyncFunction(), TIMEOUT, 6, AsyncDataStream.OutputMode.ORDERED);
    restoredTaskHarness.getStreamConfig().setStreamOperator(restoredOperator);
    restoredTaskHarness.invoke();
    restoredTaskHarness.waitForTaskRunning();
    restoredTaskHarness.processElement(new StreamRecord<>(5, initialTime + 5));
    restoredTaskHarness.processElement(new StreamRecord<>(6, initialTime + 6));
    restoredTaskHarness.processElement(new StreamRecord<>(7, initialTime + 7));
    // trigger the checkpoint while processing stream elements
    restoredTask.triggerCheckpoint(new CheckpointMetaData(checkpointId, checkpointTimestamp), CheckpointOptions.forFullCheckpoint());
    restoredTaskHarness.processElement(new StreamRecord<>(8, initialTime + 8));
    restoredTaskHarness.endInput();
    restoredTaskHarness.waitForTaskCompletion();
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(2, initialTime + 1));
    expectedOutput.add(new StreamRecord<>(4, initialTime + 2));
    expectedOutput.add(new StreamRecord<>(6, initialTime + 3));
    expectedOutput.add(new StreamRecord<>(8, initialTime + 4));
    expectedOutput.add(new StreamRecord<>(10, initialTime + 5));
    expectedOutput.add(new StreamRecord<>(12, initialTime + 6));
    expectedOutput.add(new StreamRecord<>(14, initialTime + 7));
    expectedOutput.add(new StreamRecord<>(16, initialTime + 8));
    // remove CheckpointBarrier which is not expected
    Iterator<Object> iterator = restoredTaskHarness.getOutput().iterator();
    while (iterator.hasNext()) {
        if (iterator.next() instanceof CheckpointBarrier) {
            iterator.remove();
        }
    }
    TestHarnessUtil.assertOutputEquals("StateAndRestored Test Output was not correct.", expectedOutput, restoredTaskHarness.getOutput());
}
Also used : OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) CheckpointMetaData(org.apache.flink.runtime.checkpoint.CheckpointMetaData) TaskStateHandles(org.apache.flink.runtime.state.TaskStateHandles) CheckpointBarrier(org.apache.flink.runtime.io.network.api.CheckpointBarrier) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) MockInputSplitProvider(org.apache.flink.runtime.operators.testutils.MockInputSplitProvider) Test(org.junit.Test)
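
On Java 8 and later, the iterator loop that strips CheckpointBarrier entries can be written more compactly with Collection.removeIf, which ConcurrentLinkedQueue supports; a brief equivalent, assuming the same restoredTaskHarness as above:

    // Drop checkpoint barriers from the harness output before comparing it with expectedOutput.
    restoredTaskHarness.getOutput().removeIf(element -> element instanceof CheckpointBarrier);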

Example 20 with ConcurrentLinkedQueue

Use of java.util.concurrent.ConcurrentLinkedQueue in project flink by apache.

From the class AsyncWaitOperatorTest, method testAsyncTimeout.

@Test
public void testAsyncTimeout() throws Exception {
    final long timeout = 10L;
    final AsyncWaitOperator<Integer, Integer> operator = new AsyncWaitOperator<>(new LazyAsyncFunction(), timeout, 2, AsyncDataStream.OutputMode.ORDERED);
    final Environment mockEnvironment = mock(Environment.class);
    final Configuration taskConfiguration = new Configuration();
    final ExecutionConfig executionConfig = new ExecutionConfig();
    final TaskMetricGroup metricGroup = new UnregisteredTaskMetricsGroup();
    final TaskManagerRuntimeInfo taskManagerRuntimeInfo = new TestingTaskManagerRuntimeInfo();
    final TaskInfo taskInfo = new TaskInfo("foobarTask", 1, 0, 1, 1);
    when(mockEnvironment.getTaskConfiguration()).thenReturn(taskConfiguration);
    when(mockEnvironment.getExecutionConfig()).thenReturn(executionConfig);
    when(mockEnvironment.getMetricGroup()).thenReturn(metricGroup);
    when(mockEnvironment.getTaskManagerInfo()).thenReturn(taskManagerRuntimeInfo);
    when(mockEnvironment.getTaskInfo()).thenReturn(taskInfo);
    when(mockEnvironment.getUserClassLoader()).thenReturn(AsyncWaitOperatorTest.class.getClassLoader());
    final OneInputStreamOperatorTestHarness<Integer, Integer> testHarness = new OneInputStreamOperatorTestHarness<>(operator, IntSerializer.INSTANCE, mockEnvironment);
    final long initialTime = 0L;
    final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    testHarness.setProcessingTime(initialTime);
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.processElement(new StreamRecord<>(1, initialTime));
        testHarness.setProcessingTime(initialTime + 5L);
        testHarness.processElement(new StreamRecord<>(2, initialTime + 5L));
    }
    // trigger the timeout of the first stream record
    testHarness.setProcessingTime(initialTime + timeout + 1L);
    // allow the second async stream record to be processed
    LazyAsyncFunction.countDown();
    // wait until all async collectors in the buffer have been emitted out.
    synchronized (testHarness.getCheckpointLock()) {
        testHarness.close();
    }
    expectedOutput.add(new StreamRecord<>(2, initialTime + 5L));
    TestHarnessUtil.assertOutputEquals("Output with watermark was not correct.", expectedOutput, testHarness.getOutput());
    ArgumentCaptor<Throwable> argumentCaptor = ArgumentCaptor.forClass(Throwable.class);
    verify(mockEnvironment).failExternally(argumentCaptor.capture());
    Throwable failureCause = argumentCaptor.getValue();
    Assert.assertNotNull(failureCause.getCause());
    Assert.assertTrue(failureCause.getCause() instanceof ExecutionException);
    Assert.assertNotNull(failureCause.getCause().getCause());
    Assert.assertTrue(failureCause.getCause().getCause() instanceof TimeoutException);
}
Also used : UnregisteredTaskMetricsGroup(org.apache.flink.runtime.operators.testutils.UnregisteredTaskMetricsGroup) Configuration(org.apache.flink.configuration.Configuration) TestingTaskManagerRuntimeInfo(org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo) TaskManagerRuntimeInfo(org.apache.flink.runtime.taskmanager.TaskManagerRuntimeInfo) TaskMetricGroup(org.apache.flink.runtime.metrics.groups.TaskMetricGroup) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) OneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness) TaskInfo(org.apache.flink.api.common.TaskInfo) TestingTaskManagerRuntimeInfo(org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo) Environment(org.apache.flink.runtime.execution.Environment) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) StreamMockEnvironment(org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) ExecutionException(java.util.concurrent.ExecutionException) TimeoutException(java.util.concurrent.TimeoutException) Test(org.junit.Test)

Aggregations

ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 236
Test (org.junit.Test): 102
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 56
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 52
KeyedOneInputStreamOperatorTestHarness (org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness): 43
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 40
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 40
CountDownLatch (java.util.concurrent.CountDownLatch): 37
ArrayList (java.util.ArrayList): 31
TimeWindow (org.apache.flink.streaming.api.windowing.windows.TimeWindow): 28
ListStateDescriptor (org.apache.flink.api.common.state.ListStateDescriptor): 18
ReducingStateDescriptor (org.apache.flink.api.common.state.ReducingStateDescriptor): 17
IOException (java.io.IOException): 15
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 15
StreamRecord (org.apache.flink.streaming.runtime.streamrecord.StreamRecord): 14
ExecutionException (java.util.concurrent.ExecutionException): 13
ExecutorService (java.util.concurrent.ExecutorService): 13
Map (java.util.Map): 12
OperatorStateHandles (org.apache.flink.streaming.runtime.tasks.OperatorStateHandles): 12
Iterator (java.util.Iterator): 11
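
Stepping back from the individual tests: a ConcurrentLinkedQueue works well as an output buffer when one thread appends while another later drains and asserts, which matches the task-harness setup above where the task under test runs on its own thread. A minimal, Flink-free sketch of that producer/consumer pattern (all names illustrative):

import java.util.concurrent.ConcurrentLinkedQueue;

public class ProducerConsumerSketch {
    public static void main(String[] args) throws InterruptedException {
        ConcurrentLinkedQueue<String> output = new ConcurrentLinkedQueue<>();

        // Producer: appends results as they become available (stand-in for a running task).
        Thread producer = new Thread(() -> {
            for (int i = 0; i < 5; i++) {
                output.add("record-" + i);   // lock-free enqueue, never blocks
            }
        });
        producer.start();
        producer.join();   // stand-in for waitForTaskCompletion()

        // Consumer: drains and inspects the collected output (stand-in for the test thread).
        for (String record; (record = output.poll()) != null; ) {
            System.out.println(record);
        }
    }
}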