Use of org.apache.flink.streaming.api.datastream.KeyedStream in project flink by apache.
Example from the class SortingBoundedInputITCase, method testThreeInputOperator.
@Test
public void testThreeInputOperator() {
    long numberOfRecords = 500_000;
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    Configuration config = new Configuration();
    // Run in BATCH mode so each keyed input is sorted by key before the operator consumes it.
    config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
    env.configure(config, this.getClass().getClassLoader());

    KeyedStream<Tuple2<Integer, byte[]>, Object> elements1 =
            env.fromParallelCollection(new InputGenerator(numberOfRecords),
                            new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO))
                    .keyBy(el -> el.f0);
    KeyedStream<Tuple2<Integer, byte[]>, Object> elements2 =
            env.fromParallelCollection(new InputGenerator(numberOfRecords),
                            new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO))
                    .keyBy(el -> el.f0);
    KeyedStream<Tuple2<Integer, byte[]>, Object> elements3 =
            env.fromParallelCollection(new InputGenerator(numberOfRecords),
                            new TupleTypeInfo<>(BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO))
                    .keyBy(el -> el.f0);

    // Wire the three keyed inputs into a single keyed multiple-input operator.
    KeyedMultipleInputTransformation<Long> assertingTransformation =
            new KeyedMultipleInputTransformation<>(
                    "Asserting operator", new AssertingThreeInputOperatorFactory(),
                    BasicTypeInfo.LONG_TYPE_INFO, -1, BasicTypeInfo.INT_TYPE_INFO);
    assertingTransformation.addInput(elements1.getTransformation(), elements1.getKeySelector());
    assertingTransformation.addInput(elements2.getTransformation(), elements2.getKeySelector());
    assertingTransformation.addInput(elements3.getTransformation(), elements3.getKeySelector());
    env.addOperator(assertingTransformation);

    DataStream<Long> counts = new DataStream<>(env, assertingTransformation);
    long sum = CollectionUtil.iteratorToList(DataStreamUtils.collect(counts)).stream()
            .mapToLong(l -> l)
            .sum();
    assertThat(sum, equalTo(numberOfRecords * 3));
}
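The InputGenerator and AssertingThreeInputOperatorFactory referenced above are defined elsewhere in SortingBoundedInputITCase and are not shown here. As a rough, hypothetical sketch of what such a generator can look like (not the actual class from the Flink test), a SplittableIterator producing Tuple2<Integer, byte[]> records for env.fromParallelCollection might be written as follows; the class name, record layout, and split strategy are assumptions for illustration only.

import java.util.Iterator;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.SplittableIterator;

// Hypothetical stand-in for the InputGenerator used above; the real one may differ.
class ExampleInputGenerator extends SplittableIterator<Tuple2<Integer, byte[]>> {

    private final long numberOfRecords;
    private long generated;

    ExampleInputGenerator(long numberOfRecords) {
        this.numberOfRecords = numberOfRecords;
    }

    @Override
    public Iterator<Tuple2<Integer, byte[]>>[] split(int numPartitions) {
        // Distribute the requested record count across the partitions.
        @SuppressWarnings("unchecked")
        Iterator<Tuple2<Integer, byte[]>>[] iterators = new Iterator[numPartitions];
        long perPartition = numberOfRecords / numPartitions;
        for (int i = 0; i < numPartitions; i++) {
            long count =
                    i == numPartitions - 1
                            ? numberOfRecords - perPartition * (numPartitions - 1)
                            : perPartition;
            iterators[i] = new ExampleInputGenerator(count);
        }
        return iterators;
    }

    @Override
    public int getMaximumNumberOfSplits() {
        return Integer.MAX_VALUE;
    }

    @Override
    public boolean hasNext() {
        return generated < numberOfRecords;
    }

    @Override
    public Tuple2<Integer, byte[]> next() {
        generated++;
        // Small key space so that grouping by key is meaningful for the keyed operator.
        return Tuple2.of((int) (generated % 100), new byte[12]);
    }
}

Each parallel source subtask consumes one split, so the records of all three inputs together are what the final assertion counts (numberOfRecords * 3).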
Use of org.apache.flink.streaming.api.datastream.KeyedStream in project flink by apache.
Example from the class DataStreamTest, method testKeyedConnectedStreamsType.
@Test
public void testKeyedConnectedStreamsType() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStreamSource<Integer> stream1 = env.fromElements(1, 2);
    DataStreamSource<Integer> stream2 = env.fromElements(1, 2);

    ConnectedStreams<Integer, Integer> connectedStreams =
            stream1.connect(stream2).keyBy(v -> v, v -> v);

    KeyedStream<?, ?> firstKeyedInput = (KeyedStream<?, ?>) connectedStreams.getFirstInput();
    KeyedStream<?, ?> secondKeyedInput = (KeyedStream<?, ?>) connectedStreams.getSecondInput();

    assertThat(firstKeyedInput.getKeyType(), equalTo(Types.INT));
    assertThat(secondKeyedInput.getKeyType(), equalTo(Types.INT));
}
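Since keyBy leaves both inputs of the ConnectedStreams keyed, a keyed two-input function can be applied to them directly. The following is a minimal sketch, assuming the connectedStreams variable from the test above and org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction; the output strings are illustrative only.

DataStream<String> processed =
        connectedStreams.process(
                new KeyedCoProcessFunction<Integer, Integer, Integer, String>() {
                    @Override
                    public void processElement1(Integer value, Context ctx, Collector<String> out) {
                        // Elements of the first keyed input; ctx.getCurrentKey() is the Integer key.
                        out.collect("first input, key " + ctx.getCurrentKey() + ": " + value);
                    }

                    @Override
                    public void processElement2(Integer value, Context ctx, Collector<String> out) {
                        // Elements of the second keyed input, co-partitioned by the same key.
                        out.collect("second input, key " + ctx.getCurrentKey() + ": " + value);
                    }
                });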
Use of org.apache.flink.streaming.api.datastream.KeyedStream in project flink by apache.
Example from the class DataStreamTest, method testFailedTranslationOnKeyed.
/**
 * Tests that with a {@link KeyedStream} we have to provide a
 * {@link KeyedBroadcastProcessFunction}.
 */
@Test
public void testFailedTranslationOnKeyed() {
    final MapStateDescriptor<Long, String> descriptor =
            new MapStateDescriptor<>(
                    "broadcast", BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    final DataStream<Long> srcOne =
            env.generateSequence(0L, 5L)
                    .assignTimestampsAndWatermarks(
                            new CustomWmEmitter<Long>() {
                                @Override
                                public long extractTimestamp(Long element, long previousElementTimestamp) {
                                    return element;
                                }
                            })
                    .keyBy((KeySelector<Long, Long>) value -> value);

    final DataStream<String> srcTwo =
            env.fromElements("Test:0", "Test:1", "Test:2", "Test:3", "Test:4", "Test:5")
                    .assignTimestampsAndWatermarks(
                            new CustomWmEmitter<String>() {
                                @Override
                                public long extractTimestamp(String element, long previousElementTimestamp) {
                                    return Long.parseLong(element.split(":")[1]);
                                }
                            });

    BroadcastStream<String> broadcast = srcTwo.broadcast(descriptor);
    BroadcastConnectedStream<Long, String> bcStream = srcOne.connect(broadcast);

    // Applying a non-keyed BroadcastProcessFunction to a keyed stream must fail.
    expectedException.expect(IllegalArgumentException.class);

    bcStream.process(
            new BroadcastProcessFunction<Long, String, String>() {
                @Override
                public void processBroadcastElement(String value, Context ctx, Collector<String> out) throws Exception {
                    // do nothing
                }

                @Override
                public void processElement(Long value, ReadOnlyContext ctx, Collector<String> out) throws Exception {
                    // do nothing
                }
            });
}
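For contrast, here is a minimal sketch of the variant that does translate, assuming the srcOne, broadcast, and descriptor values from the test above: because the non-broadcast side is keyed, a KeyedBroadcastProcessFunction (keyed by Long here) is the function type to supply. The broadcast-state update shown in the body is only an illustration.

DataStream<String> result =
        srcOne.connect(broadcast)
                .process(new KeyedBroadcastProcessFunction<Long, Long, String, String>() {
                    @Override
                    public void processElement(Long value, ReadOnlyContext ctx, Collector<String> out) {
                        // Keyed side: keyed state and timers are available here.
                    }

                    @Override
                    public void processBroadcastElement(String value, Context ctx, Collector<String> out) throws Exception {
                        // Broadcast side: update the broadcast state declared by `descriptor`,
                        // keyed here by the numeric suffix of the "Test:<n>" elements.
                        ctx.getBroadcastState(descriptor)
                                .put(Long.parseLong(value.split(":")[1]), value);
                    }
                });

Supplying the keyed variant of the function is exactly what the failing translation in the test above is meant to enforce.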