Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache, from the class StreamGraphGeneratorExecutionModeDetectionTest, method testExecutionModePropagationFromEnvWithBatchAndUnboundedSource:
@Test
public void testExecutionModePropagationFromEnvWithBatchAndUnboundedSource() {
    final Configuration config = new Configuration();
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    environment.configure(config, getClass().getClassLoader());
    environment
            .fromSource(
                    new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    "unbounded-source")
            .print();

    thrown.expect(IllegalStateException.class);
    thrown.expectMessage("combination is not allowed");

    environment.getStreamGraph();
}
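The same check can also be written without the ExpectedException rule. The sketch below is not from the Flink test class; it assumes assertThrows is on the test classpath (org.junit.Assert.assertThrows in JUnit 4.13+, or the JUnit 5 equivalent) and places the assertion directly around the getStreamGraph() call, which is where the invalid combination is detected.

// Minimal sketch, not from the Flink sources: assumes a static import of assertThrows.
@Test
public void batchModeRejectsUnboundedSourceSketch() {
    final Configuration config = new Configuration();
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

    final StreamExecutionEnvironment env =
            StreamExecutionEnvironment.getExecutionEnvironment();
    env.configure(config, getClass().getClassLoader());
    env.fromSource(
                    new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    "unbounded-source")
            .print();

    // BATCH runtime mode plus an unbounded source is rejected when the
    // StreamGraph is generated, not when the source is added.
    assertThrows(IllegalStateException.class, env::getStreamGraph);
}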
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache, from the class StreamGraphGeneratorExecutionModeDetectionTest, method testExecutionModePropagationFromEnvWithDefaultAndBoundedSource:
@Test
public void testExecutionModePropagationFromEnvWithDefaultAndBoundedSource() {
    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    environment.enableCheckpointing(100);

    environment
            .fromSource(
                    new MockSource(Boundedness.BOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    "bounded-source")
            .print();

    assertThat(
            environment.getStreamGraph(),
            hasProperties(
                    GlobalStreamExchangeMode.ALL_EDGES_PIPELINED, JobType.STREAMING, true, true));
}
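For contrast, here is a sketch of the batch-side counterpart, not taken verbatim from the test class: the same bounded MockSource with RUNTIME_MODE set explicitly to BATCH. It assumes the test class's hasProperties helper and assumes that a batch job is generated with blocking exchanges and checkpointing disabled.

// Sketch only: the hasProperties arguments (ALL_EDGES_BLOCKING, JobType.BATCH,
// checkpointing/chaining flags set to false) are assumptions, not from the excerpt.
@Test
public void batchModeWithBoundedSourceSketch() {
    final Configuration config = new Configuration();
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

    final StreamExecutionEnvironment environment =
            StreamExecutionEnvironment.getExecutionEnvironment();
    environment.configure(config, getClass().getClassLoader());
    environment
            .fromSource(
                    new MockSource(Boundedness.BOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    "bounded-source")
            .print();

    assertThat(
            environment.getStreamGraph(),
            hasProperties(
                    GlobalStreamExchangeMode.ALL_EDGES_BLOCKING, JobType.BATCH, false, false));
}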
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache, from the class StreamGraphGeneratorBatchExecutionTest, method testFeedbackThrowsExceptionInBatch:
@Test
public void testFeedbackThrowsExceptionInBatch() {
    final SourceTransformation<Integer, ?, ?> bounded =
            new SourceTransformation<>(
                    "Bounded Source",
                    new MockSource(Boundedness.BOUNDED, 100),
                    WatermarkStrategy.noWatermarks(),
                    IntegerTypeInfo.of(Integer.class),
                    1);

    final FeedbackTransformation<Integer> feedbackTransformation =
            new FeedbackTransformation<>(bounded, 5L);

    testNoSupportForIterationsInBatchHelper(bounded, feedbackTransformation);
}
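The helper testNoSupportForIterationsInBatchHelper is not part of this excerpt. The sketch below illustrates one way such a helper could be written: it builds a StreamGraph from the given transformations in BATCH runtime mode and expects the generator to reject iterations. The helper name, the four-argument StreamGraphGenerator constructor, and the expected exception type are assumptions.

// Hypothetical sketch in the spirit of testNoSupportForIterationsInBatchHelper;
// constructor arguments and the expected exception are assumptions.
private void noIterationsInBatchSketch(Transformation<?>... transformations) {
    final Configuration configuration = new Configuration();
    configuration.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);

    final StreamGraphGenerator generator =
            new StreamGraphGenerator(
                    Arrays.asList(transformations),
                    new ExecutionConfig(),
                    new CheckpointConfig(),
                    configuration);

    try {
        generator.generate();
        fail("Expected iterations (FeedbackTransformation) to be rejected in BATCH mode");
    } catch (UnsupportedOperationException expected) {
        // expected: iterations are not supported when executing in BATCH mode
    }
}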
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache, from the class MultipleInputNodeCreationProcessorTest, method createChainableStream:
private void createChainableStream(TableTestUtil util) {
    DataStreamSource<Integer> dataStream =
            util.getStreamEnv()
                    .fromSource(
                            new MockSource(Boundedness.BOUNDED, 1),
                            WatermarkStrategy.noWatermarks(),
                            "chainableStream");

    TableTestUtil.createTemporaryView(
            util.tableEnv(),
            "chainableStream",
            dataStream,
            scala.Option.apply(new Expression[] {ApiExpressionUtils.unresolvedRef("a")}),
            scala.Option.empty(),
            scala.Option.empty());
}
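Outside of the Scala TableTestUtil helper, a stream backed by a bounded MockSource can also be registered through the public Table API. A minimal sketch, assuming a StreamTableEnvironment and a static import of org.apache.flink.table.api.Expressions.$ for the column reference:

// Sketch using the public Table API instead of TableTestUtil (assumed setup).
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

DataStreamSource<Integer> chainable =
        env.fromSource(
                new MockSource(Boundedness.BOUNDED, 1),
                WatermarkStrategy.noWatermarks(),
                "chainableStream");

// Register the stream as a temporary view with a single column named "a".
tEnv.createTemporaryView("chainableStream", chainable, $("a"));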
Use of org.apache.flink.api.connector.source.mocks.MockSource in project flink by apache, from the class SourceOperatorLatencyMetricsTest, method testLatencyMarkEmission:
private void testLatencyMarkEmission(
        boolean shouldExpectLatencyMarkers,
        Configuration taskManagerConfig,
        ExecutionConfig executionConfig)
        throws Exception {
    try (SourceOperatorTestHarness testHarness =
            new SourceOperatorTestHarness(
                    new SourceOperatorFactory<>(
                            new MockSource(Boundedness.CONTINUOUS_UNBOUNDED, 1),
                            WatermarkStrategy.noWatermarks()),
                    new MockEnvironmentBuilder()
                            .setTaskManagerRuntimeInfo(
                                    new TestingTaskManagerRuntimeInfo(taskManagerConfig))
                            .setExecutionConfig(executionConfig)
                            .build())) {
        testHarness.setup();
        testHarness.open();

        for (long processingTime = 0; processingTime <= MAX_PROCESSING_TIME; processingTime++) {
            testHarness.getProcessingTimeService().setCurrentTime(processingTime);
            testHarness.emitNext();
        }

        if (!shouldExpectLatencyMarkers) {
            assertTrue(testHarness.getOutput().isEmpty());
        } else {
            List<LatencyMarker> expectedOutput = new ArrayList<>();
            expectedOutput.add(
                    new LatencyMarker(1, testHarness.getOperator().getOperatorID(), 0));
            for (long markedTime = LATENCY_MARK_INTERVAL;
                    markedTime <= MAX_PROCESSING_TIME;
                    markedTime += LATENCY_MARK_INTERVAL) {
                expectedOutput.add(
                        new LatencyMarker(markedTime, testHarness.getOperator().getOperatorID(), 0));
            }
            assertThat(
                    (Collection<Object>) testHarness.getOutput(),
                    contains(expectedOutput.toArray()));
        }
    }
}
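For context, the boolean flag and the two configuration parameters of this helper would typically be driven by latency-tracking settings. The call sites sketched below are assumptions rather than part of this excerpt; they rely on MetricOptions.LATENCY_INTERVAL being a long-valued option and on ExecutionConfig.setLatencyTrackingInterval.

// Sketch of possible call sites for testLatencyMarkEmission (assumed, not from the excerpt).

// No latency tracking configured anywhere: no latency markers expected.
testLatencyMarkEmission(false, new Configuration(), new ExecutionConfig());

// Tracking enabled cluster-wide via the task manager configuration
// (assumes MetricOptions.LATENCY_INTERVAL is a long-valued option).
Configuration tmConfig = new Configuration();
tmConfig.setLong(MetricOptions.LATENCY_INTERVAL, LATENCY_MARK_INTERVAL);
testLatencyMarkEmission(true, tmConfig, new ExecutionConfig());

// Tracking enabled per job via the ExecutionConfig.
ExecutionConfig executionConfig = new ExecutionConfig();
executionConfig.setLatencyTrackingInterval(LATENCY_MARK_INTERVAL);
testLatencyMarkEmission(true, new Configuration(), executionConfig);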