Search in sources:

Example 1 with OneInputStreamTaskTestHarness

Use of org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness in project flink by apache.

From the class AsyncWaitOperatorTest, method testStateSnapshotAndRestore.

@Test
public void testStateSnapshotAndRestore() throws Exception {
    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, 1, 1, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    AsyncWaitOperatorFactory<Integer, Integer> factory = new AsyncWaitOperatorFactory<>(new LazyAsyncFunction(), TIMEOUT, 4, AsyncDataStream.OutputMode.ORDERED);
    final StreamConfig streamConfig = testHarness.getStreamConfig();
    OperatorID operatorID = new OperatorID(42L, 4711L);
    streamConfig.setStreamOperatorFactory(factory);
    streamConfig.setOperatorID(operatorID);
    final TestTaskStateManager taskStateManagerMock = testHarness.getTaskStateManager();
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    final OneInputStreamTask<Integer, Integer> task = testHarness.getTask();
    final long initialTime = 0L;
    testHarness.processElement(new StreamRecord<>(1, initialTime + 1));
    testHarness.processElement(new StreamRecord<>(2, initialTime + 2));
    testHarness.processElement(new StreamRecord<>(3, initialTime + 3));
    testHarness.processElement(new StreamRecord<>(4, initialTime + 4));
    testHarness.waitForInputProcessing();
    final long checkpointId = 1L;
    final long checkpointTimestamp = 1L;
    final CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, checkpointTimestamp);
    task.triggerCheckpointAsync(checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation());
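    // wait until the triggered checkpoint has been reported to the test task state manager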
    taskStateManagerMock.getWaitForReportLatch().await();
    assertEquals(checkpointId, taskStateManagerMock.getReportedCheckpointId());
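    // release the LazyAsyncFunction so the pending async results can complete and the input can be finished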
    LazyAsyncFunction.countDown();
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    // set the operator state from previous attempt into the restored one
    TaskStateSnapshot subtaskStates = taskStateManagerMock.getLastJobManagerTaskStateSnapshot();
    final OneInputStreamTaskTestHarness<Integer, Integer> restoredTaskHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    restoredTaskHarness.setTaskStateSnapshot(checkpointId, subtaskStates);
    restoredTaskHarness.setupOutputForSingletonOperatorChain();
    AsyncWaitOperatorFactory<Integer, Integer> restoredOperator = new AsyncWaitOperatorFactory<>(new MyAsyncFunction(), TIMEOUT, 6, AsyncDataStream.OutputMode.ORDERED);
    restoredTaskHarness.getStreamConfig().setStreamOperatorFactory(restoredOperator);
    restoredTaskHarness.getStreamConfig().setOperatorID(operatorID);
    restoredTaskHarness.invoke();
    restoredTaskHarness.waitForTaskRunning();
    final OneInputStreamTask<Integer, Integer> restoredTask = restoredTaskHarness.getTask();
    restoredTaskHarness.processElement(new StreamRecord<>(5, initialTime + 5));
    restoredTaskHarness.processElement(new StreamRecord<>(6, initialTime + 6));
    restoredTaskHarness.processElement(new StreamRecord<>(7, initialTime + 7));
    // trigger the checkpoint while processing stream elements
    restoredTask.triggerCheckpointAsync(new CheckpointMetaData(checkpointId, checkpointTimestamp), CheckpointOptions.forCheckpointWithDefaultLocation()).get();
    restoredTaskHarness.processElement(new StreamRecord<>(8, initialTime + 8));
    restoredTaskHarness.endInput();
    restoredTaskHarness.waitForTaskCompletion();
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(2, initialTime + 1));
    expectedOutput.add(new StreamRecord<>(4, initialTime + 2));
    expectedOutput.add(new StreamRecord<>(6, initialTime + 3));
    expectedOutput.add(new StreamRecord<>(8, initialTime + 4));
    expectedOutput.add(new StreamRecord<>(10, initialTime + 5));
    expectedOutput.add(new StreamRecord<>(12, initialTime + 6));
    expectedOutput.add(new StreamRecord<>(14, initialTime + 7));
    expectedOutput.add(new StreamRecord<>(16, initialTime + 8));
    // remove CheckpointBarrier which is not expected
    restoredTaskHarness.getOutput().removeIf(record -> record instanceof CheckpointBarrier);
    TestHarnessUtil.assertOutputEquals("StateAndRestored Test Output was not correct.", expectedOutput, restoredTaskHarness.getOutput());
}
Also used : OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) CheckpointMetaData(org.apache.flink.runtime.checkpoint.CheckpointMetaData) CheckpointBarrier(org.apache.flink.runtime.io.network.api.CheckpointBarrier) TestTaskStateManager(org.apache.flink.runtime.state.TestTaskStateManager) TaskStateSnapshot(org.apache.flink.runtime.checkpoint.TaskStateSnapshot) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) Test(org.junit.Test)

Example 2 with OneInputStreamTaskTestHarness

Use of org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness in project flink by apache.

From the class AsyncWaitOperatorTest, method testOperatorChainWithProcessingTime.

/**
 * Tests that the AsyncWaitOperator works together with chaining.
 */
@Test
public void testOperatorChainWithProcessingTime() throws Exception {
    JobVertex chainedVertex = createChainedVertex(new MyAsyncFunction(), new MyAsyncFunction());
    final OneInputStreamTaskTestHarness<Integer, Integer> testHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, 1, 1, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    testHarness.taskConfig = chainedVertex.getConfiguration();
    final StreamConfig streamConfig = testHarness.getStreamConfig();
    final StreamConfig operatorChainStreamConfig = new StreamConfig(chainedVertex.getConfiguration());
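    // copy the head operator factory of the chained job vertex into the harness's stream config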
    streamConfig.setStreamOperatorFactory(operatorChainStreamConfig.getStreamOperatorFactory(AsyncWaitOperatorTest.class.getClassLoader()));
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    long initialTimestamp = 0L;
    testHarness.processElement(new StreamRecord<>(5, initialTimestamp));
    testHarness.processElement(new StreamRecord<>(6, initialTimestamp + 1L));
    testHarness.processElement(new StreamRecord<>(7, initialTimestamp + 2L));
    testHarness.processElement(new StreamRecord<>(8, initialTimestamp + 3L));
    testHarness.processElement(new StreamRecord<>(9, initialTimestamp + 4L));
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    List<Object> expectedOutput = new LinkedList<>();
    expectedOutput.add(new StreamRecord<>(22, initialTimestamp));
    expectedOutput.add(new StreamRecord<>(26, initialTimestamp + 1L));
    expectedOutput.add(new StreamRecord<>(30, initialTimestamp + 2L));
    expectedOutput.add(new StreamRecord<>(34, initialTimestamp + 3L));
    expectedOutput.add(new StreamRecord<>(38, initialTimestamp + 4L));
    TestHarnessUtil.assertOutputEqualsSorted("Test for chained operator with AsyncWaitOperator failed", expectedOutput, testHarness.getOutput(), new StreamRecordComparator());
}
Also used : OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) LinkedList(java.util.LinkedList) JobVertex(org.apache.flink.runtime.jobgraph.JobVertex) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) Test(org.junit.Test)

Example 3 with OneInputStreamTaskTestHarness

Use of org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness in project flink by apache.

From the class StatefulOperatorChainedTaskTest, method createRunAndCheckpointOperatorChain.

private JobManagerTaskRestore createRunAndCheckpointOperatorChain(OperatorID headId, OneInputStreamOperator<String, String> headOperator, OperatorID tailId, OneInputStreamOperator<String, String> tailOperator, Optional<JobManagerTaskRestore> restore) throws Exception {
    File localRootDir = temporaryFolder.newFolder();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, 1, 1, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, localRootDir);
    testHarness.setupOperatorChain(headId, headOperator).chain(tailId, tailOperator, StringSerializer.INSTANCE, true).finish();
    if (restore.isPresent()) {
        JobManagerTaskRestore taskRestore = restore.get();
        testHarness.setTaskStateSnapshot(taskRestore.getRestoreCheckpointId(), taskRestore.getTaskStateSnapshot());
    }
    StreamMockEnvironment environment = new StreamMockEnvironment(testHarness.jobConfig, testHarness.taskConfig, testHarness.getExecutionConfig(), testHarness.memorySize, new MockInputSplitProvider(), testHarness.bufferSize, testHarness.getTaskStateManager());
    Configuration configuration = new Configuration();
    configuration.setString(STATE_BACKEND.key(), "rocksdb");
    File file = temporaryFolder.newFolder();
    configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
    configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
    environment.setTaskManagerInfo(new TestingTaskManagerRuntimeInfo(configuration, System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));
    testHarness.invoke(environment);
    testHarness.waitForTaskRunning();
    OneInputStreamTask<String, String> streamTask = testHarness.getTask();
    processRecords(testHarness);
    triggerCheckpoint(testHarness, streamTask);
    TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();
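    // capture the reported checkpoint so a subsequent run can be restored from it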
    JobManagerTaskRestore jobManagerTaskRestore = new JobManagerTaskRestore(taskStateManager.getReportedCheckpointId(), taskStateManager.getLastJobManagerTaskStateSnapshot());
    testHarness.endInput();
    testHarness.waitForTaskCompletion();
    return jobManagerTaskRestore;
}
Also used : TestTaskStateManager(org.apache.flink.runtime.state.TestTaskStateManager) TestingTaskManagerRuntimeInfo(org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo) Configuration(org.apache.flink.configuration.Configuration) OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) JobManagerTaskRestore(org.apache.flink.runtime.checkpoint.JobManagerTaskRestore) StreamMockEnvironment(org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment) File(java.io.File) MockInputSplitProvider(org.apache.flink.runtime.operators.testutils.MockInputSplitProvider)

Example 4 with OneInputStreamTaskTestHarness

Use of org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness in project flink by apache.

From the class RocksDBAsyncSnapshotTest, method testFullyAsyncSnapshot.

/**
 * This ensures that asynchronous state handles are actually materialized asynchronously.
 *
 * <p>We use latches to block at various stages and see if the code still continues through the
 * parts that are not asynchronous. If the checkpoint is not done asynchronously the test will
 * simply lock forever.
 */
@Test
public void testFullyAsyncSnapshot() throws Exception {
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(OneInputStreamTask::new, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    testHarness.configureForKeyedStream(new KeySelector<String, String>() {

        @Override
        public String getKey(String value) throws Exception {
            return value;
        }
    }, BasicTypeInfo.STRING_TYPE_INFO);
    StreamConfig streamConfig = testHarness.getStreamConfig();
    File dbDir = temporaryFolder.newFolder();
    RocksDBStateBackend backend = new RocksDBStateBackend(new MemoryStateBackend());
    backend.setDbStoragePath(dbDir.getAbsolutePath());
    streamConfig.setStateBackend(backend);
    streamConfig.setStreamOperator(new AsyncCheckpointOperator());
    streamConfig.setOperatorID(new OperatorID());
    final OneShotLatch delayCheckpointLatch = new OneShotLatch();
    final OneShotLatch ensureCheckpointLatch = new OneShotLatch();
    CheckpointResponder checkpointResponderMock = new CheckpointResponder() {

        @Override
        public void acknowledgeCheckpoint(JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics, TaskStateSnapshot subtaskState) {
            // hold back the checkpoint acknowledgement until the test allows it to finish
            try {
                delayCheckpointLatch.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            boolean hasManagedKeyedState = false;
            for (Map.Entry<OperatorID, OperatorSubtaskState> entry : subtaskState.getSubtaskStateMappings()) {
                OperatorSubtaskState state = entry.getValue();
                if (state != null) {
                    hasManagedKeyedState |= state.getManagedKeyedState() != null;
                }
            }
            // should be one k/v state
            assertTrue(hasManagedKeyedState);
            // we now know that the checkpoint went through
            ensureCheckpointLatch.trigger();
        }

        @Override
        public void reportCheckpointMetrics(JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointMetrics checkpointMetrics) {
        }

        @Override
        public void declineCheckpoint(JobID jobID, ExecutionAttemptID executionAttemptID, long checkpointId, CheckpointException checkpointException) {
        }
    };
    JobID jobID = new JobID();
    ExecutionAttemptID executionAttemptID = new ExecutionAttemptID();
    TestTaskStateManager taskStateManagerTestMock = new TestTaskStateManager(jobID, executionAttemptID, checkpointResponderMock, TestLocalRecoveryConfig.disabled(), new InMemoryStateChangelogStorage(), new HashMap<>(), -1L, new OneShotLatch());
    StreamMockEnvironment mockEnv = new StreamMockEnvironment(testHarness.jobConfig, testHarness.taskConfig, testHarness.memorySize, new MockInputSplitProvider(), testHarness.bufferSize, taskStateManagerTestMock);
    AtomicReference<Throwable> errorRef = new AtomicReference<>();
    mockEnv.setExternalExceptionHandler(errorRef::set);
    testHarness.invoke(mockEnv);
    testHarness.waitForTaskRunning();
    final OneInputStreamTask<String, String> task = testHarness.getTask();
    task.triggerCheckpointAsync(new CheckpointMetaData(42, 17), CheckpointOptions.forCheckpointWithDefaultLocation()).get();
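    // keep processing elements while the asynchronous part of the snapshot is still pending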
    testHarness.processElement(new StreamRecord<>("Wohoo", 0));
    // now we allow the checkpoint
    delayCheckpointLatch.trigger();
    // wait for the checkpoint to go through
    ensureCheckpointLatch.await();
    testHarness.endInput();
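    // shut down the async operations thread pool and wait for any outstanding snapshot work to finish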
    ExecutorService threadPool = task.getAsyncOperationsThreadPool();
    threadPool.shutdown();
    Assert.assertTrue(threadPool.awaitTermination(60_000, TimeUnit.MILLISECONDS));
    testHarness.waitForTaskCompletion();
    if (errorRef.get() != null) {
        fail("Unexpected exception during execution.");
    }
}
Also used : OneInputStreamTask(org.apache.flink.streaming.runtime.tasks.OneInputStreamTask) CheckpointException(org.apache.flink.runtime.checkpoint.CheckpointException) MemoryStateBackend(org.apache.flink.runtime.state.memory.MemoryStateBackend) CheckpointMetrics(org.apache.flink.runtime.checkpoint.CheckpointMetrics) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) OperatorSubtaskState(org.apache.flink.runtime.checkpoint.OperatorSubtaskState) TaskStateSnapshot(org.apache.flink.runtime.checkpoint.TaskStateSnapshot) InMemoryStateChangelogStorage(org.apache.flink.runtime.state.changelog.inmemory.InMemoryStateChangelogStorage) OneShotLatch(org.apache.flink.core.testutils.OneShotLatch) StreamMockEnvironment(org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment) MockInputSplitProvider(org.apache.flink.runtime.operators.testutils.MockInputSplitProvider) ExecutionAttemptID(org.apache.flink.runtime.executiongraph.ExecutionAttemptID) CheckpointResponder(org.apache.flink.runtime.taskmanager.CheckpointResponder) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) AtomicReference(java.util.concurrent.atomic.AtomicReference) CheckpointMetaData(org.apache.flink.runtime.checkpoint.CheckpointMetaData) CheckpointException(org.apache.flink.runtime.checkpoint.CheckpointException) CancelTaskException(org.apache.flink.runtime.execution.CancelTaskException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) TestTaskStateManager(org.apache.flink.runtime.state.TestTaskStateManager) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) ExecutorService(java.util.concurrent.ExecutorService) File(java.io.File) Map(java.util.Map) HashMap(java.util.HashMap) JobID(org.apache.flink.api.common.JobID) Test(org.junit.Test)

Example 5 with OneInputStreamTaskTestHarness

Use of org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness in project flink by apache.

From the class TestProcessingTimeServiceTest, method testCustomTimeServiceProvider.

@Test
public void testCustomTimeServiceProvider() throws Throwable {
    final TestProcessingTimeService tp = new TestProcessingTimeService();
    final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>((env) -> new OneInputStreamTask<>(env, tp), BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();
    StreamConfig streamConfig = testHarness.getStreamConfig();
    StreamMap<String, String> mapOperator = new StreamMap<>(new StreamTaskTimerTest.DummyMapFunction<>());
    streamConfig.setStreamOperator(mapOperator);
    streamConfig.setOperatorID(new OperatorID());
    testHarness.invoke();
    testHarness.waitForTaskRunning();
    ProcessingTimeService processingTimeService = ((StreamMap<?, ?>) testHarness.getHeadOperator()).getProcessingTimeService();
    assertEquals(Long.MIN_VALUE, processingTimeService.getCurrentProcessingTime());
    tp.setCurrentTime(11);
    assertEquals(11, processingTimeService.getCurrentProcessingTime());
    tp.setCurrentTime(15);
    tp.setCurrentTime(16);
    assertEquals(16, processingTimeService.getCurrentProcessingTime());
    // register 2 timers
    processingTimeService.registerTimer(30, timestamp -> {
    });
    processingTimeService.registerTimer(40, timestamp -> {
    });
    assertEquals(2, tp.getNumActiveTimers());
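    // advancing the time past a timer's timestamp fires it and removes it from the active set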
    tp.setCurrentTime(35);
    assertEquals(1, tp.getNumActiveTimers());
    tp.setCurrentTime(40);
    assertEquals(0, tp.getNumActiveTimers());
    tp.shutdownService();
}
Also used : ProcessingTimeService(org.apache.flink.streaming.runtime.tasks.ProcessingTimeService) TestProcessingTimeService(org.apache.flink.streaming.runtime.tasks.TestProcessingTimeService) OneInputStreamTaskTestHarness(org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness) StreamConfig(org.apache.flink.streaming.api.graph.StreamConfig) OperatorID(org.apache.flink.runtime.jobgraph.OperatorID) StreamMap(org.apache.flink.streaming.api.operators.StreamMap) Test(org.junit.Test)

Aggregations

OneInputStreamTaskTestHarness (org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness): 10
OneInputStreamTask (org.apache.flink.streaming.runtime.tasks.OneInputStreamTask): 9
StreamConfig (org.apache.flink.streaming.api.graph.StreamConfig): 8
Test (org.junit.Test): 8
OperatorID (org.apache.flink.runtime.jobgraph.OperatorID): 5
TestTaskStateManager (org.apache.flink.runtime.state.TestTaskStateManager): 4
File (java.io.File): 3
CheckpointMetaData (org.apache.flink.runtime.checkpoint.CheckpointMetaData): 3
MockInputSplitProvider (org.apache.flink.runtime.operators.testutils.MockInputSplitProvider): 3
StreamMap (org.apache.flink.streaming.api.operators.StreamMap): 3
StreamMockEnvironment (org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment): 3
IOException (java.io.IOException): 2
ExecutionException (java.util.concurrent.ExecutionException): 2
ExecutorService (java.util.concurrent.ExecutorService): 2
AtomicReference (java.util.concurrent.atomic.AtomicReference): 2
Configuration (org.apache.flink.configuration.Configuration): 2
OneShotLatch (org.apache.flink.core.testutils.OneShotLatch): 2
CheckpointException (org.apache.flink.runtime.checkpoint.CheckpointException): 2
TaskStateSnapshot (org.apache.flink.runtime.checkpoint.TaskStateSnapshot): 2
CancelTaskException (org.apache.flink.runtime.execution.CancelTaskException): 2
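
Common usage pattern

Distilled from the examples above, the harness follows a common lifecycle: construct it, configure the operator (or operator chain) through its StreamConfig, invoke the task, feed StreamRecords, end the input, wait for completion, and compare the captured output. The sketch below illustrates that pattern with a simple StreamMap operator that doubles each value; the class name, the map function, and the exact constructor overload chosen here are illustrative assumptions and may need adjustment for a particular Flink version.

import java.util.concurrent.ConcurrentLinkedQueue;

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.StreamMap;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTask;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness;
import org.apache.flink.streaming.util.TestHarnessUtil;
import org.junit.Test;

// Minimal, illustrative sketch of the OneInputStreamTaskTestHarness lifecycle
// seen in the examples above; not taken verbatim from the Flink code base.
public class HarnessLifecycleSketchTest {

    @Test
    public void minimalHarnessLifecycle() throws Exception {
        // build a harness around a OneInputStreamTask with Integer input and output
        final OneInputStreamTaskTestHarness<Integer, Integer> testHarness =
                new OneInputStreamTaskTestHarness<>(
                        OneInputStreamTask::new, 1, 1, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO);
        testHarness.setupOutputForSingletonOperatorChain();

        // configure a single operator: a StreamMap that doubles every element
        StreamConfig streamConfig = testHarness.getStreamConfig();
        streamConfig.setStreamOperator(new StreamMap<Integer, Integer>(value -> 2 * value));
        streamConfig.setOperatorID(new OperatorID());

        // start the task and feed it a few records
        testHarness.invoke();
        testHarness.waitForTaskRunning();
        testHarness.processElement(new StreamRecord<>(1, 1L));
        testHarness.processElement(new StreamRecord<>(2, 2L));
        testHarness.waitForInputProcessing();

        // signal end of input and wait for the task to shut down cleanly
        testHarness.endInput();
        testHarness.waitForTaskCompletion();

        // compare the captured output, timestamps included
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        expectedOutput.add(new StreamRecord<>(2, 1L));
        expectedOutput.add(new StreamRecord<>(4, 2L));
        TestHarnessUtil.assertOutputEquals("Unexpected output.", expectedOutput, testHarness.getOutput());
    }
}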