use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
the class SubtaskCheckpointCoordinatorTest method testBroadcastCancelCheckpointMarkerOnAbortingFromCoordinator.
@Test
public void testBroadcastCancelCheckpointMarkerOnAbortingFromCoordinator() throws Exception {
    OneInputStreamTaskTestHarness<String, String> testHarness =
            new OneInputStreamTaskTestHarness<>(
                    OneInputStreamTask::new,
                    1,
                    1,
                    BasicTypeInfo.STRING_TYPE_INFO,
                    BasicTypeInfo.STRING_TYPE_INFO);
    testHarness.setupOutputForSingletonOperatorChain();

    StreamConfig streamConfig = testHarness.getStreamConfig();
    streamConfig.setStreamOperator(new MapOperator());

    testHarness.invoke();
    testHarness.waitForTaskRunning();

    MockEnvironment mockEnvironment = MockEnvironment.builder().build();
    try (SubtaskCheckpointCoordinator subtaskCheckpointCoordinator =
            new MockSubtaskCheckpointCoordinatorBuilder()
                    .setEnvironment(mockEnvironment)
                    .build()) {

        ArrayList<Object> recordOrEvents = new ArrayList<>();
        StreamElementSerializer<String> stringStreamElementSerializer =
                new StreamElementSerializer<>(StringSerializer.INSTANCE);
        ResultPartitionWriter resultPartitionWriter =
                new RecordOrEventCollectingResultPartitionWriter<>(
                        recordOrEvents, stringStreamElementSerializer);
        mockEnvironment.addOutputs(Collections.singletonList(resultPartitionWriter));

        OneInputStreamTask<String, String> task = testHarness.getTask();
        OperatorChain<String, OneInputStreamOperator<String, String>> operatorChain =
                new RegularOperatorChain<>(
                        task, StreamTask.createRecordWriterDelegate(streamConfig, mockEnvironment));

        long checkpointId = 42L;
        // notify checkpoint aborted before execution.
        subtaskCheckpointCoordinator.notifyCheckpointAborted(checkpointId, operatorChain, () -> true);
        subtaskCheckpointCoordinator.checkpointState(
                new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
                CheckpointOptions.forCheckpointWithDefaultLocation(),
                new CheckpointMetricsBuilder(),
                operatorChain,
                false,
                () -> false);

        assertEquals(1, recordOrEvents.size());
        Object recordOrEvent = recordOrEvents.get(0);
        // ensure CancelCheckpointMarker is broadcast downstream.
        assertTrue(recordOrEvent instanceof CancelCheckpointMarker);
        assertEquals(checkpointId, ((CancelCheckpointMarker) recordOrEvent).getCheckpointId());

        testHarness.endInput();
        testHarness.waitForTaskCompletion();
    }
}
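The MapOperator set on the stream config above is a private helper of SubtaskCheckpointCoordinatorTest and is not included in this snippet. A minimal stand-in, assuming an identity StreamMap over String records (the real test class may also override checkpoint callbacks), could look like this:

// Hypothetical stand-in for the MapOperator used above (requires
// org.apache.flink.streaming.api.operators.StreamMap); the actual test class may differ.
private static class MapOperator extends StreamMap<String, String> {
    private static final long serialVersionUID = 1L;

    MapOperator() {
        super(value -> value); // identity MapFunction<String, String>
    }
}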
use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
the class SubtaskCheckpointCoordinatorTest method testNotifyCheckpointSubsumed.
@Test
public void testNotifyCheckpointSubsumed() throws Exception {
    TestTaskStateManager stateManager = new TestTaskStateManager();
    MockEnvironment mockEnvironment =
            MockEnvironment.builder().setTaskStateManager(stateManager).build();

    try (SubtaskCheckpointCoordinatorImpl subtaskCheckpointCoordinator =
            (SubtaskCheckpointCoordinatorImpl)
                    new MockSubtaskCheckpointCoordinatorBuilder()
                            .setEnvironment(mockEnvironment)
                            .setUnalignedCheckpointEnabled(true)
                            .build()) {

        StreamMap<String, String> streamMap =
                new StreamMap<>((MapFunction<String, String>) value -> value);
        streamMap.setProcessingTimeService(new TestProcessingTimeService());
        final OperatorChain<String, AbstractStreamOperator<String>> operatorChain =
                operatorChain(streamMap);
        StreamTaskStateInitializerImpl stateInitializer =
                new StreamTaskStateInitializerImpl(mockEnvironment, new TestStateBackend());
        operatorChain.initializeStateAndOpenOperators(stateInitializer);

        long checkpointId = 42L;
        subtaskCheckpointCoordinator
                .getChannelStateWriter()
                .start(checkpointId, CheckpointOptions.forCheckpointWithDefaultLocation());
        subtaskCheckpointCoordinator.checkpointState(
                new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
                CheckpointOptions.forCheckpointWithDefaultLocation(),
                new CheckpointMetricsBuilder(),
                operatorChain,
                false,
                () -> false);

        long notifySubsumeCheckpointId = checkpointId + 1L;
        // notify that a newer checkpoint subsumes the one taken above.
        subtaskCheckpointCoordinator.notifyCheckpointSubsumed(
                notifySubsumeCheckpointId, operatorChain, () -> true);
        assertEquals(
                notifySubsumeCheckpointId,
                ((TestStateBackend.TestKeyedStateBackend<?>) streamMap.getKeyedStateBackend())
                        .getSubsumeCheckpointId());
    }
}
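The operatorChain(...) helper used here and in the next test is not part of the snippet. A plausible sketch, assuming Flink's OperatorChainTest.setupOperatorChain test utility is available (the actual helper may build the chain differently, for example via RegularOperatorChain as in the first snippet):

// Hypothetical sketch of the operatorChain(...) helper referenced above.
@SafeVarargs
private static <T> OperatorChain<T, AbstractStreamOperator<T>> operatorChain(
        OneInputStreamOperator<T, T>... streamOperators) throws Exception {
    // Wires the given operators into a minimal chain suitable for unit tests
    // (assumption: OperatorChainTest.setupOperatorChain is on the test classpath).
    return OperatorChainTest.setupOperatorChain(streamOperators);
}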
use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
the class SubtaskCheckpointCoordinatorTest method testNotifyCheckpointAbortedBeforeAsyncPhase.
@Test
public void testNotifyCheckpointAbortedBeforeAsyncPhase() throws Exception {
    TestTaskStateManager stateManager = new TestTaskStateManager();
    MockEnvironment mockEnvironment =
            MockEnvironment.builder().setTaskStateManager(stateManager).build();

    try (SubtaskCheckpointCoordinatorImpl subtaskCheckpointCoordinator =
            (SubtaskCheckpointCoordinatorImpl)
                    new MockSubtaskCheckpointCoordinatorBuilder()
                            .setEnvironment(mockEnvironment)
                            .setUnalignedCheckpointEnabled(true)
                            .build()) {

        CheckpointOperator checkpointOperator = new CheckpointOperator(new OperatorSnapshotFutures());
        final OperatorChain<String, AbstractStreamOperator<String>> operatorChain =
                operatorChain(checkpointOperator);

        long checkpointId = 42L;
        // notify checkpoint aborted before execution.
        subtaskCheckpointCoordinator.notifyCheckpointAborted(checkpointId, operatorChain, () -> true);
        assertEquals(1, subtaskCheckpointCoordinator.getAbortedCheckpointSize());

        subtaskCheckpointCoordinator
                .getChannelStateWriter()
                .start(checkpointId, CheckpointOptions.forCheckpointWithDefaultLocation());
        subtaskCheckpointCoordinator.checkpointState(
                new CheckpointMetaData(checkpointId, System.currentTimeMillis()),
                CheckpointOptions.forCheckpointWithDefaultLocation(),
                new CheckpointMetricsBuilder(),
                operatorChain,
                false,
                () -> false);

        assertFalse(checkpointOperator.isCheckpointed());
        assertEquals(-1, stateManager.getReportedCheckpointId());
        assertEquals(0, subtaskCheckpointCoordinator.getAbortedCheckpointSize());
        assertEquals(0, subtaskCheckpointCoordinator.getAsyncCheckpointRunnableSize());
    }
}
use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
the class SubtaskCheckpointCoordinatorTest method testNotifyCheckpointComplete.
@Test
public void testNotifyCheckpointComplete() throws Exception {
    TestTaskStateManager stateManager = new TestTaskStateManager();
    MockEnvironment mockEnvironment =
            MockEnvironment.builder().setTaskStateManager(stateManager).build();

    try (SubtaskCheckpointCoordinator subtaskCheckpointCoordinator =
            new MockSubtaskCheckpointCoordinatorBuilder()
                    .setEnvironment(mockEnvironment)
                    .build()) {

        final OperatorChain<?, ?> operatorChain = getOperatorChain(mockEnvironment);

        long checkpointId = 42L;
        {
            subtaskCheckpointCoordinator.notifyCheckpointComplete(checkpointId, operatorChain, () -> true);
            assertEquals(checkpointId, stateManager.getNotifiedCompletedCheckpointId());
        }

        long newCheckpointId = checkpointId + 1;
        {
            subtaskCheckpointCoordinator.notifyCheckpointComplete(newCheckpointId, operatorChain, () -> false);
            // even if the task is not running, the state manager can still receive the notification.
            assertEquals(newCheckpointId, stateManager.getNotifiedCompletedCheckpointId());
        }
    }
}
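The getOperatorChain(mockEnvironment) helper is likewise not shown in the snippet. A plausible sketch, assuming the MockStreamTaskBuilder and NonRecordWriter utilities (the actual helper may be wired differently):

// Hypothetical sketch of the getOperatorChain(...) helper referenced above.
private static OperatorChain<?, ?> getOperatorChain(MockEnvironment mockEnvironment)
        throws Exception {
    return new RegularOperatorChain<>(
            new MockStreamTaskBuilder(mockEnvironment).build(), // minimal StreamTask over the mock environment
            new NonRecordWriter<>()); // the chain does not emit any records
}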
use of org.apache.flink.runtime.operators.testutils.MockEnvironment in project flink by apache.
the class StreamOperatorChainingTest method testMultiChaining.
/**
* Verify that multi-chaining works.
*/
private void testMultiChaining(StreamExecutionEnvironment env) throws Exception {
    // set parallelism to 2 to avoid chaining with the source when only one processor is available.
    env.setParallelism(2);

    // the actual elements will not be used
    DataStream<Integer> input = env.fromElements(1, 2, 3);

    sink1Results = new ArrayList<>();
    sink2Results = new ArrayList<>();

    input = input.map(value -> value);

    input.map(value -> "First: " + value)
            .addSink(
                    new SinkFunction<String>() {
                        @Override
                        public void invoke(String value, Context ctx) throws Exception {
                            sink1Results.add(value);
                        }
                    });

    input.map(value -> "Second: " + value)
            .addSink(
                    new SinkFunction<String>() {
                        @Override
                        public void invoke(String value, Context ctx) throws Exception {
                            sink2Results.add(value);
                        }
                    });

    // we build our own StreamTask and OperatorChain
    JobGraph jobGraph = env.getStreamGraph().getJobGraph();

    Assert.assertTrue(jobGraph.getVerticesSortedTopologicallyFromSources().size() == 2);

    JobVertex chainedVertex = jobGraph.getVerticesSortedTopologicallyFromSources().get(1);

    Configuration configuration = chainedVertex.getConfiguration();
    StreamConfig streamConfig = new StreamConfig(configuration);
    StreamMap<Integer, Integer> headOperator =
            streamConfig.getStreamOperator(Thread.currentThread().getContextClassLoader());

    try (MockEnvironment environment = createMockEnvironment(chainedVertex.getName())) {
        StreamTask<Integer, StreamMap<Integer, Integer>> mockTask =
                createMockTask(streamConfig, environment);
        OperatorChain<Integer, StreamMap<Integer, Integer>> operatorChain =
                createOperatorChain(streamConfig, environment, mockTask);

        headOperator.setup(mockTask, streamConfig, operatorChain.getMainOperatorOutput());

        operatorChain.initializeStateAndOpenOperators(null);

        headOperator.processElement(new StreamRecord<>(1));
        headOperator.processElement(new StreamRecord<>(2));
        headOperator.processElement(new StreamRecord<>(3));

        assertThat(sink1Results, contains("First: 1", "First: 2", "First: 3"));
        assertThat(sink2Results, contains("Second: 1", "Second: 2", "Second: 3"));
    }
}
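createMockEnvironment(...), createMockTask(...), and createOperatorChain(...) are private helpers of StreamOperatorChainingTest and are not reproduced here. As an assumption, the environment helper can be sketched with the same builder already used in the snippets above:

// Hypothetical sketch of the createMockEnvironment(...) helper; the real helper
// in StreamOperatorChainingTest may configure additional settings.
private MockEnvironment createMockEnvironment(String taskName) {
    return MockEnvironment.builder()
            .setTaskName(taskName) // used for logging and error reporting
            .setInputSplitProvider(new MockInputSplitProvider())
            .setBufferSize(1024) // memory segment size for the mock network buffers
            .build();
}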