Use of org.apache.flink.streaming.api.datastream.MultipleConnectedStreams in the project flink-mirror by flink-ci.
From the class DataStreamBatchExecutionITCase, method batchMixedKeyedAndNonKeyedMultiInputOperator.
@Test
public void batchMixedKeyedAndNonKeyedMultiInputOperator() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setRuntimeMode(RuntimeExecutionMode.BATCH);

    // All three inputs use the same watermarking: monotonously increasing
    // timestamps taken from the tuple's integer field.
    WatermarkStrategy<Tuple2<String, Integer>> watermarks =
            WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                    .withTimestampAssigner((in, ts) -> in.f1);

    // Two broadcast (non-keyed) side inputs.
    DataStream<Tuple2<String, Integer>> bc1Input =
            env.fromElements(Tuple2.of("bc3", 3), Tuple2.of("bc2", 2))
                    .assignTimestampsAndWatermarks(watermarks)
                    .broadcast();
    DataStream<Tuple2<String, Integer>> bc2Input =
            env.fromElements(Tuple2.of("bc1", 1))
                    .assignTimestampsAndWatermarks(watermarks)
                    .broadcast();

    // The keyed main input.
    DataStream<Tuple2<String, Integer>> regularInput =
            env.fromElements(
                            Tuple2.of("regular1", 1),
                            Tuple2.of("regular1", 2),
                            Tuple2.of("regular1", 3),
                            Tuple2.of("regular1", 4),
                            Tuple2.of("regular2", 3),
                            Tuple2.of("regular2", 5),
                            Tuple2.of("regular1", 3))
                    .assignTimestampsAndWatermarks(watermarks)
                    .keyBy(input -> input.f0);

    KeyedMultipleInputTransformation<String> multipleInputTransformation =
            new KeyedMultipleInputTransformation<>(
                    "operator",
                    mixedInputsOperatorFactory,
                    BasicTypeInfo.STRING_TYPE_INFO,
                    1,
                    BasicTypeInfo.STRING_TYPE_INFO);
    // Keyed input gets a key selector; the broadcast inputs pass null (non-keyed).
    multipleInputTransformation.addInput(
            regularInput.getTransformation(), input -> ((Tuple2<String, Integer>) input).f0);
    multipleInputTransformation.addInput(bc1Input.getTransformation(), null);
    multipleInputTransformation.addInput(bc2Input.getTransformation(), null);

    DataStream<String> result =
            new MultipleConnectedStreams(env).transform(multipleInputTransformation);

    try (CloseableIterator<String> resultIterator = result.executeAndCollect()) {
        List<String> results = CollectionUtil.iteratorToList(resultIterator);
        // In BATCH mode the broadcast inputs are consumed fully before the keyed
        // input, so every keyed record sees the complete broadcast state.
        assertThat(
                results,
                equalTo(
                        Arrays.asList(
                                "(regular1,1): [bc3, bc2, bc1]",
                                "(regular1,2): [bc3, bc2, bc1]",
                                "(regular1,3): [bc3, bc2, bc1]",
                                "(regular1,3): [bc3, bc2, bc1]",
                                "(regular1,4): [bc3, bc2, bc1]",
                                "(regular2,3): [bc3, bc2, bc1]",
                                "(regular2,5): [bc3, bc2, bc1]")));
    }
}
Use of org.apache.flink.streaming.api.datastream.MultipleConnectedStreams in the project flink by splunk.
From the class StreamGraphGeneratorBatchExecutionTest, method testInputSelectableMultiInputTransformation.
@Test
public void testInputSelectableMultiInputTransformation() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Three identical bounded sources feeding the multi-input operator.
    DataStreamSource<Integer> firstSource = env.fromElements(1, 2);
    DataStreamSource<Integer> secondSource = env.fromElements(1, 2);
    DataStreamSource<Integer> thirdSource = env.fromElements(1, 2);

    // An operator that implements InputSelectable (second flag = true), which
    // batch execution must reject.
    MultipleInputOperatorFactory selectableOperator = new MultipleInputOperatorFactory(3, true);
    KeyedMultipleInputTransformation<Integer> multipleInputTransformation =
            new KeyedMultipleInputTransformation<>(
                    "operator",
                    selectableOperator,
                    BasicTypeInfo.INT_TYPE_INFO,
                    1,
                    BasicTypeInfo.INT_TYPE_INFO);
    multipleInputTransformation.addInput(firstSource.getTransformation(), e -> e);
    multipleInputTransformation.addInput(secondSource.getTransformation(), e -> e);
    multipleInputTransformation.addInput(thirdSource.getTransformation(), e -> e);

    DataStreamSink<Integer> sink =
            new MultipleConnectedStreams(env)
                    .transform(multipleInputTransformation)
                    .addSink(new DiscardingSink<>());

    // Building the batch stream graph must fail for InputSelectable operators.
    expectedException.expect(IllegalStateException.class);
    expectedException.expectMessage(
            "Batch state backend and sorting inputs are not supported in graphs with an InputSelectable operator.");
    getStreamGraphInBatchMode(sink);
}
Use of org.apache.flink.streaming.api.datastream.MultipleConnectedStreams in the project flink by splunk.
From the class DataStreamBatchExecutionITCase, method batchMixedKeyedAndNonKeyedMultiInputOperator.
@Test
public void batchMixedKeyedAndNonKeyedMultiInputOperator() throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setRuntimeMode(RuntimeExecutionMode.BATCH);

    // First broadcast side input: timestamps come from the tuple's int field.
    final DataStream<Tuple2<String, Integer>> bc1Input =
            env.fromElements(Tuple2.of("bc3", 3), Tuple2.of("bc2", 2))
                    .assignTimestampsAndWatermarks(
                            WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                                    .withTimestampAssigner((in, ts) -> in.f1))
                    .broadcast();

    // Second broadcast side input.
    final DataStream<Tuple2<String, Integer>> bc2Input =
            env.fromElements(Tuple2.of("bc1", 1))
                    .assignTimestampsAndWatermarks(
                            WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                                    .withTimestampAssigner((in, ts) -> in.f1))
                    .broadcast();

    // Keyed main input, keyed on the tuple's string field.
    final DataStream<Tuple2<String, Integer>> regularInput =
            env.fromElements(
                            Tuple2.of("regular1", 1),
                            Tuple2.of("regular1", 2),
                            Tuple2.of("regular1", 3),
                            Tuple2.of("regular1", 4),
                            Tuple2.of("regular2", 3),
                            Tuple2.of("regular2", 5),
                            Tuple2.of("regular1", 3))
                    .assignTimestampsAndWatermarks(
                            WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                                    .withTimestampAssigner((in, ts) -> in.f1))
                    .keyBy(input -> input.f0);

    final KeyedMultipleInputTransformation<String> multipleInputTransformation =
            new KeyedMultipleInputTransformation<>(
                    "operator",
                    mixedInputsOperatorFactory,
                    BasicTypeInfo.STRING_TYPE_INFO,
                    1,
                    BasicTypeInfo.STRING_TYPE_INFO);
    // Only the regular input is keyed; null key selectors mark the broadcast inputs.
    multipleInputTransformation.addInput(
            regularInput.getTransformation(), input -> ((Tuple2<String, Integer>) input).f0);
    multipleInputTransformation.addInput(bc1Input.getTransformation(), null);
    multipleInputTransformation.addInput(bc2Input.getTransformation(), null);

    final DataStream<String> result =
            new MultipleConnectedStreams(env).transform(multipleInputTransformation);

    try (CloseableIterator<String> resultIterator = result.executeAndCollect()) {
        final List<String> results = CollectionUtil.iteratorToList(resultIterator);
        // Batch mode drains both broadcast inputs before the keyed records, so
        // each emitted record reports the full broadcast contents.
        final List<String> expected =
                Arrays.asList(
                        "(regular1,1): [bc3, bc2, bc1]",
                        "(regular1,2): [bc3, bc2, bc1]",
                        "(regular1,3): [bc3, bc2, bc1]",
                        "(regular1,3): [bc3, bc2, bc1]",
                        "(regular1,4): [bc3, bc2, bc1]",
                        "(regular2,3): [bc3, bc2, bc1]",
                        "(regular2,5): [bc3, bc2, bc1]");
        assertThat(results, equalTo(expected));
    }
}
Use of org.apache.flink.streaming.api.datastream.MultipleConnectedStreams in the project flink by splunk.
From the class MultipleInputITCase, method testKeyedState.
@Test
public void testKeyedState() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    TestListResultSink<Long> resultSink = new TestListResultSink<>();

    // Three bounded sources; keys are formed modulo 3 so records from different
    // sources can share keyed state.
    DataStream<Long> source1 = env.fromElements(0L, 3L);
    DataStream<Long> source2 = env.fromElements(13L, 16L);
    DataStream<Long> source3 = env.fromElements(101L, 104L);

    KeyedMultipleInputTransformation<Long> transform =
            new KeyedMultipleInputTransformation<>(
                    "My Operator",
                    new KeyedSumMultipleInputOperatorFactory(),
                    BasicTypeInfo.LONG_TYPE_INFO,
                    1,
                    BasicTypeInfo.LONG_TYPE_INFO);

    // Target-typed lambda; every input is keyed by value % 3.
    KeySelector<Long, Long> keySelector = value -> value % 3;
    env.addOperator(
            transform
                    .addInput(source1.getTransformation(), keySelector)
                    .addInput(source2.getTransformation(), keySelector)
                    .addInput(source3.getTransformation(), keySelector));
    new MultipleConnectedStreams(env).transform(transform).addSink(resultSink);

    env.execute();

    List<Long> result = resultSink.getResult();
    Collections.sort(result);
    // Running per-key sums: each second record for a key adds onto the first.
    assertThat(result, contains(0L, 3L, 13L, 13L + 16L, 101L, 101L + 104L));
}
Use of org.apache.flink.streaming.api.datastream.MultipleConnectedStreams in the project flink by splunk.
From the class SourceNAryInputChainingITCase, method nAryInputStreamOperation.
/**
 * Wires the given streams into a single N-ary union-style multi-input operator and
 * returns its output stream. The raw transformation API is used directly because no
 * fluent DataStream method exists for N-ary inputs.
 */
private static DataStream<Long> nAryInputStreamOperation(final DataStream<?>... inputs) {
    final StreamExecutionEnvironment env = inputs[0].getExecutionEnvironment();

    // Still clumsy due to the raw API: build the transformation by hand.
    final MultipleInputTransformation<Long> transform =
            new MultipleInputTransformation<>(
                    "MultipleInputOperator",
                    new NAryUnionOpFactory(inputs.length),
                    Types.LONG,
                    env.getParallelism());

    for (final DataStream<?> stream : inputs) {
        transform.addInput(stream.getTransformation());
    }
    // Allow the operator to chain directly with its source inputs.
    transform.setChainingStrategy(ChainingStrategy.HEAD_WITH_SOURCES);

    env.addOperator(transform);
    return new MultipleConnectedStreams(env).transform(transform);
}
Aggregations