Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache.
In the class SourceOperatorStreamTaskTest, the method testTriggeringStopWithSavepointWithDrain:
@Test
public void testTriggeringStopWithSavepointWithDrain() throws Exception {
    SourceOperatorFactory<Integer> sourceOperatorFactory =
            new SourceOperatorFactory<>(
                    new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 2),
                    WatermarkStrategy.noWatermarks());
    CompletableFuture<Boolean> checkpointCompleted = new CompletableFuture<>();
    CheckpointResponder checkpointResponder =
            new TestCheckpointResponder() {
                @Override
                public void acknowledgeCheckpoint(
                        JobID jobID,
                        ExecutionAttemptID executionAttemptID,
                        long checkpointId,
                        CheckpointMetrics checkpointMetrics,
                        TaskStateSnapshot subtaskState) {
                    super.acknowledgeCheckpoint(
                            jobID, executionAttemptID, checkpointId, checkpointMetrics, subtaskState);
                    checkpointCompleted.complete(null);
                }
            };
    try (StreamTaskMailboxTestHarness<Integer> testHarness =
            new StreamTaskMailboxTestHarnessBuilder<>(
                            SourceOperatorStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
                    .setupOutputForSingletonOperatorChain(sourceOperatorFactory)
                    .setCheckpointResponder(checkpointResponder)
                    .build()) {
        CompletableFuture<Boolean> triggerResult =
                testHarness.streamTask.triggerCheckpointAsync(
                        new CheckpointMetaData(2, 2),
                        CheckpointOptions.alignedNoTimeout(
                                SavepointType.terminate(SavepointFormatType.CANONICAL),
                                CheckpointStorageLocationReference.getDefault()));
        checkpointCompleted.whenComplete(
                (ignored, exception) -> testHarness.streamTask.notifyCheckpointCompleteAsync(2));
        testHarness.waitForTaskCompletion();
        testHarness.finishProcessing();
        assertTrue(triggerResult.isDone());
        assertTrue(triggerResult.get());
        assertTrue(checkpointCompleted.isDone());
    }
}
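The part that makes this trigger a stop-with-savepoint with drain is the CheckpointOptions value passed to triggerCheckpointAsync. The fragment below only isolates and comments the factories already used in the test; the variable name is illustrative and imports follow the test class above.

    // The "drain" semantics come from SavepointType.terminate(...): the task is asked
    // to finish processing before the synchronous savepoint completes.
    CheckpointOptions stopWithSavepointDrain =
            CheckpointOptions.alignedNoTimeout(
                    SavepointType.terminate(SavepointFormatType.CANONICAL),
                    CheckpointStorageLocationReference.getDefault());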
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache.
In the class StreamingJobGraphGeneratorTest, the method testCoordinatedOperator:
@Test
public void testCoordinatedOperator() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> source =
            env.fromSource(
                    new MockSource(Boundedness.BOUNDED, 1),
                    WatermarkStrategy.noWatermarks(),
                    "TestSource");
    source.addSink(new DiscardingSink<>());
    StreamGraph streamGraph = env.getStreamGraph();
    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);
    // There should be only one job vertex.
    assertEquals(1, jobGraph.getNumberOfVertices());
    JobVertex jobVertex = jobGraph.getVerticesAsArray()[0];
    List<SerializedValue<OperatorCoordinator.Provider>> coordinatorProviders =
            jobVertex.getOperatorCoordinators();
    // There should be only one coordinator provider.
    assertEquals(1, coordinatorProviders.size());
    // The invokable class should be SourceOperatorStreamTask.
    final ClassLoader classLoader = getClass().getClassLoader();
    assertEquals(SourceOperatorStreamTask.class, jobVertex.getInvokableClass(classLoader));
    StreamOperatorFactory operatorFactory =
            new StreamConfig(jobVertex.getConfiguration()).getStreamOperatorFactory(classLoader);
    assertTrue(operatorFactory instanceof SourceOperatorFactory);
}
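Because the source and the DiscardingSink are chained, the whole pipeline collapses into that single job vertex. As a hedged counter-example, the sketch below disables chaining on an otherwise identical pipeline; the expectation stated in the comment is an assumption, and imports follow the test class above.

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.disableOperatorChaining();
    env.fromSource(
            new MockSource(Boundedness.BOUNDED, 1),
            WatermarkStrategy.noWatermarks(),
            "TestSource")
        .addSink(new DiscardingSink<>());
    JobGraph unchained = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    // Assumption: with chaining off, the source and the sink land in separate job
    // vertices, so getNumberOfVertices() would be greater than 1 here.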
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache.
In the class StreamGraphGeneratorExecutionModeDetectionTest, the method testExecutionModePropagationFromEnvWithAutomaticAndBoundedSource:
@Test
public void testExecutionModePropagationFromEnvWithAutomaticAndBoundedSource() {
    final Configuration config = new Configuration();
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.AUTOMATIC);
    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    environment.enableCheckpointing(100L);
    environment.configure(config, getClass().getClassLoader());
    environment
            .fromSource(
                    new MockSource(Boundedness.BOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    "bounded-source")
            .print();
    assertTrue(environment.isChainingEnabled());
    assertThat(environment.getCheckpointInterval(), is(equalTo(100L)));
    final StreamGraph streamGraph = environment.getStreamGraph();
    assertThat(
            streamGraph,
            hasProperties(
                    GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, JobType.BATCH, false, false));
}
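RuntimeExecutionMode.AUTOMATIC resolves to BATCH here only because every source in the topology is bounded. As a hedged contrast, the sketch below swaps in an unbounded MockSource; the expected outcome in the comment is an assumption based on the bounded case above, and imports follow the test class.

    final Configuration config = new Configuration();
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.AUTOMATIC);
    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    environment.configure(
            config, StreamGraphGeneratorExecutionModeDetectionTest.class.getClassLoader());
    environment
            .fromSource(
                    new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 1),
                    WatermarkStrategy.noWatermarks(),
                    "unbounded-source")
            .print();
    // Assumption: with an unbounded source, getStreamGraph() should produce a
    // STREAMING job graph rather than JobType.BATCH.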
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache.
In the class StreamingJobGraphGeneratorTest, the method testOperatorCoordinatorAddedToJobVertex:
@Test
public void testOperatorCoordinatorAddedToJobVertex() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStream<Integer> stream =
            env.fromSource(
                    new MockSource(Boundedness.BOUNDED, 1),
                    WatermarkStrategy.noWatermarks(),
                    "TestingSource");
    OneInputTransformation<Integer, Integer> resultTransform =
            new OneInputTransformation<Integer, Integer>(
                    stream.getTransformation(),
                    "AnyName",
                    new CoordinatedTransformOperatorFactory(),
                    BasicTypeInfo.INT_TYPE_INFO,
                    env.getParallelism());
    new TestingSingleOutputStreamOperator<>(env, resultTransform).print();
    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    assertEquals(2, jobGraph.getVerticesAsArray()[0].getOperatorCoordinators().size());
}
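The count of 2 is the source's coordinator plus the one contributed by CoordinatedTransformOperatorFactory, both chained into the first vertex. A hedged baseline sketch without the coordinated transform is shown below; the expected count of 1 is an assumption consistent with testCoordinatedOperator above, and imports follow the test class.

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.fromSource(
            new MockSource(Boundedness.BOUNDED, 1),
            WatermarkStrategy.noWatermarks(),
            "TestingSource")
        .print();
    JobGraph baseline = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    // Assumption: only the FLIP-27 source contributes a coordinator here.
    assertEquals(1, baseline.getVerticesAsArray()[0].getOperatorCoordinators().size());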
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache.
In the class DataStreamSourceTest, the method testConstructor:
/** Test constructor for new Sources (FLIP-27). */
@Test
public void testConstructor() {
    int expectParallelism = 100;
    boolean expectIsParallel = true;
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    MockSource mockSource = new MockSource(Boundedness.BOUNDED, 10);
    DataStreamSource<Integer> stream =
            env.fromSource(mockSource, WatermarkStrategy.noWatermarks(), "TestingSource");
    stream.setParallelism(expectParallelism);
    assertEquals(expectIsParallel, stream.isParallel());
    assertEquals(expectParallelism, stream.getParallelism());
}
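Beyond constructing the DataStreamSource, the same entry point can be run end to end. The sketch below is a hedged smoke test, not Flink test code: the job name and the sink choice are illustrative, execute(...) throws a checked Exception, and imports follow the test classes above.

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.fromSource(
            new MockSource(Boundedness.BOUNDED, 10),
            WatermarkStrategy.noWatermarks(),
            "TestingSource")
        .addSink(new DiscardingSink<>());
    // execute(...) blocks until the bounded MockSource is drained; declare or handle Exception.
    env.execute("mock-source-smoke-test");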