Usage example of org.apache.flink.api.connector.source.lib.NumberSequenceSource in the Apache Flink project.
From class JobCancelingITCase, method testCancelingWhileBackPressured.
/**
 * Verifies that a job experiencing back pressure can still be canceled and reaches the
 * {@code CANCELED} terminal state.
 *
 * <p>Task-cancellation interrupts and the JVM watchdog are effectively disabled (timeouts of
 * one day) so the test exercises cooperative cancellation under back pressure rather than
 * forceful task termination.
 */
@Test
public void testCancelingWhileBackPressured() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    env.getConfig().enableObjectReuse();
    // Basically disable interrupts and JVM killer watchdogs
    env.getConfig().setTaskCancellationTimeout(Duration.ofDays(1).toMillis());
    env.getConfig().setTaskCancellationInterval(Duration.ofDays(1).toMillis());

    // Check both FLIP-27 and normal sources
    final DataStreamSource<Long> source1 =
            env.fromSource(
                    new NumberSequenceSource(1L, Long.MAX_VALUE),
                    WatermarkStrategy.noWatermarks(),
                    "source-1");
    // otherwise split enumerator will generate splits that can start emitting from very large
    // numbers, that do not work well with ExplodingFlatMapFunction
    source1.setParallelism(1);
    final DataStream<Long> source2 = env.addSource(new InfiniteLongSourceFunction());

    source1.connect(source2)
            .flatMap(new ExplodingFlatMapFunction())
            .startNewChain()
            .addSink(new SleepingSink());

    StreamGraph streamGraph = env.getStreamGraph();
    JobGraph jobGraph = streamGraph.getJobGraph();
    ClusterClient<?> client = MINI_CLUSTER.getClusterClient();

    JobID jobID = client.submitJob(jobGraph).get();
    waitForAllTaskRunning(MINI_CLUSTER.getMiniCluster(), jobID, false);
    // give a bit of time of back pressure to build up
    Thread.sleep(100);
    client.cancel(jobID).get();

    // Poll with a short pause instead of busy-spinning: the previous empty-bodied loop
    // burned a CPU core and flooded the cluster with status requests while waiting.
    while (!client.getJobStatus(jobID).get().isTerminalState()) {
        Thread.sleep(50);
    }
    assertEquals(JobStatus.CANCELED, client.getJobStatus(jobID).get());
}
Usage example of org.apache.flink.api.connector.source.lib.NumberSequenceSource in the Apache Flink project.
From class SourceNAryInputChainingITCase, method createProgramWithMultipleUnionInputs.
/**
 * Builds the DataStream program sketched below, feeding the N-ary operator a mix of
 * mapped, unioned, and direct inputs.
 *
 * <pre>
 *                            +--------------+
 *      (src 1) --> (map) --> |              |
 *                            |              |
 *      (src 2) --+           |              |
 *                +-- UNION --> |            |
 *      (src 3) --+           |    N-Ary     |
 *                            |   Operator   |
 * (src 4) -> (map) --+       |              |
 *                    +-- UNION -> |         |
 * (src 5) -> (map) --+       |              |
 *                            |              |
 *      (src 6) --------->    |              |
 *                            +--------------+
 * </pre>
 */
private DataStream<Long> createProgramWithMultipleUnionInputs() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    env.getConfig().enableObjectReuse();

    // Six bounded sources, each emitting a consecutive range of ten numbers.
    final DataStream<Long> src1 = boundedSequence(env, 1L, 10L, "source-1");
    final DataStream<Long> src2 = boundedSequence(env, 11L, 20L, "source-2");
    final DataStream<Long> src3 = boundedSequence(env, 21L, 30L, "source-3");
    final DataStream<Long> src4 = boundedSequence(env, 31L, 40L, "source-4");
    final DataStream<Long> src5 = boundedSequence(env, 41L, 50L, "source-5");
    final DataStream<Long> src6 = boundedSequence(env, 51L, 60L, "source-6");

    return nAryInputStreamOperation(
            src1.map(v -> v),
            src2.union(src3),
            src4.map(v -> v).union(src5.map(v -> v)),
            src6);
}

/** Adds a FLIP-27 number-sequence source emitting the inclusive range [from, to]. */
private static DataStream<Long> boundedSequence(
        StreamExecutionEnvironment env, long from, long to, String name) {
    return env.fromSource(
            new NumberSequenceSource(from, to), WatermarkStrategy.noWatermarks(), name);
}
Usage example of org.apache.flink.api.connector.source.lib.NumberSequenceSource in the Apache Flink project.
From class SourceNAryInputChainingITCase, method createProgramWithMixedInputs.
/**
 * Builds the DataStream program sketched below: three sources feed the N-ary operator,
 * two of them through an identity map and one directly.
 *
 * <pre>
 *                       +--------------+
 * (src 1) --> (map) --> |              |
 *                       |    N-Ary     |
 * (src 2) ------------> |              |
 *                       |   Operator   |
 * (src 3) --> (map) --> |              |
 *                       +--------------+
 * </pre>
 */
private DataStream<Long> createProgramWithMixedInputs() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    env.getConfig().enableObjectReuse();

    // All sources share the same (absent) watermarking.
    final WatermarkStrategy<Long> noWatermarks = WatermarkStrategy.noWatermarks();
    final DataStream<Long> src1 =
            env.fromSource(new NumberSequenceSource(1L, 10L), noWatermarks, "source-1");
    final DataStream<Long> src2 =
            env.fromSource(new NumberSequenceSource(11L, 20L), noWatermarks, "source-2");
    final DataStream<Long> src3 =
            env.fromSource(new NumberSequenceSource(21L, 30L), noWatermarks, "source-3");

    // Inputs 1 and 3 pass through an identity map; input 2 feeds the operator directly.
    return nAryInputStreamOperation(src1.map(v -> v), src2, src3.map(v -> v));
}
Usage example of org.apache.flink.api.connector.source.lib.NumberSequenceSource in the Apache Flink project.
From class StreamingJobGraphGeneratorTest, method testYieldingOperatorProperlyChainedOnNewSources.
/**
 * Tests that {@link org.apache.flink.streaming.api.operators.YieldingOperatorFactory} are
 * chained to new sources, see FLINK-20444.
 */
@Test
public void testYieldingOperatorProperlyChainedOnNewSources() {
    StreamExecutionEnvironment chainEnv = StreamExecutionEnvironment.createLocalEnvironment(1);
    chainEnv.fromSource(
                    new NumberSequenceSource(0, 10), WatermarkStrategy.noWatermarks(), "input")
            .map((x) -> x)
            .transform(
                    "test", BasicTypeInfo.LONG_TYPE_INFO, new YieldingTestOperatorFactory<>())
            .addSink(new DiscardingSink<>());

    final JobGraph jobGraph = chainEnv.getStreamGraph().getJobGraph();
    final List<JobVertex> vertices = jobGraph.getVerticesSortedTopologicallyFromSources();

    // The whole pipeline should chain into a single job vertex...
    // (use the statically imported assertEquals consistently; the original mixed
    // Assert.assertEquals and assertEquals in adjacent lines)
    assertEquals(1, vertices.size());
    // ...containing four chained operators: source, map, yielding operator, sink.
    assertEquals(4, vertices.get(0).getOperatorIDs().size());
}
Usage example of org.apache.flink.api.connector.source.lib.NumberSequenceSource in the Apache Flink project.
From class StreamingJobGraphGeneratorTest, method createJobGraphWithDescription.
/**
 * Builds a job graph in which every operator carries an explicit description, so callers
 * can verify how descriptions are combined during operator chaining.
 *
 * @param env the execution environment to build the program on (parallelism forced to 1)
 * @param inputNames one name per source; a single name yields a plain element source,
 *     several names yield a multiple-input transformation chained with its sources
 * @return the generated {@link JobGraph}
 */
private JobGraph createJobGraphWithDescription(StreamExecutionEnvironment env, String... inputNames) {
    env.setParallelism(1);

    final DataStream<Long> source;
    if (inputNames.length == 1) {
        source = env.fromElements(1L, 2L, 3L).setDescription(inputNames[0]);
    } else {
        MultipleInputTransformation<Long> transform =
                new MultipleInputTransformation<>(
                        "mit", new UnusedOperatorFactory(), Types.LONG, env.getParallelism());
        transform.setDescription("operator chained with source");
        transform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);
        // One described number-sequence source per requested name, wired in order.
        for (String name : inputNames) {
            transform.addInput(
                    env.fromSource(
                                    new NumberSequenceSource(1, 2),
                                    WatermarkStrategy.noWatermarks(),
                                    name)
                            .setDescription(name)
                            .getTransformation());
        }
        source = new DataStream<>(env, transform);
    }

    // Two described map branches, each printed twice with distinct descriptions.
    final DataStream<Long> plusOne = source.map(x -> x + 1).setDescription("x + 1");
    final DataStream<Long> plusTwo = source.map(x -> x + 2).setDescription("x + 2");
    plusOne.print().setDescription("first print of map1");
    plusOne.print().setDescription("second print of map1");
    plusTwo.print().setDescription("first print of map2");
    plusTwo.print().setDescription("second print of map2");

    return StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
}
Aggregations