Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in project flink by apache.
From the class DataStreamBatchExecutionITCase, method batchKeyedNonKeyedTwoInputOperator:
/**
* Verifies that all regular input is processed before keyed input.
*
* <p>Here, the first input is keyed while the second input is not keyed.
*/
@Test
public void batchKeyedNonKeyedTwoInputOperator() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);
    env.setRuntimeMode(RuntimeExecutionMode.BATCH);
    DataStream<Tuple2<String, Integer>> keyedInput =
            env.fromElements(Tuple2.of("regular2", 4), Tuple2.of("regular1", 3), Tuple2.of("regular1", 2), Tuple2.of("regular2", 1))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                            .withTimestampAssigner((in, ts) -> in.f1));
    DataStream<Tuple2<String, Integer>> regularInput =
            env.fromElements(Tuple2.of("regular4", 4), Tuple2.of("regular3", 3), Tuple2.of("regular3", 2), Tuple2.of("regular4", 1))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps()
                            .withTimestampAssigner((in, ts) -> in.f1));
    DataStream<String> result =
            keyedInput.keyBy(in -> in.f0)
                    .connect(regularInput)
                    .transform("operator", BasicTypeInfo.STRING_TYPE_INFO, new TwoInputIdentityOperator());
    try (CloseableIterator<String> resultIterator = result.executeAndCollect()) {
        List<String> results = CollectionUtil.iteratorToList(resultIterator);
        assertThat(results, equalTo(Arrays.asList(
                "(regular4,4)", "(regular3,3)", "(regular3,2)", "(regular4,1)",
                "(regular1,2)", "(regular1,3)", "(regular2,1)", "(regular2,4)")));
    }
}
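The TwoInputIdentityOperator used above is not shown in this excerpt. A minimal sketch of what such an operator could look like, assuming it simply forwards the string form of every element from either input (the class body below is an illustration, not the actual Flink source):

// Sketch only, assuming AbstractStreamOperator and TwoInputStreamOperator from
// org.apache.flink.streaming.api.operators and StreamRecord from
// org.apache.flink.streaming.runtime.streamrecord are imported.
private static class TwoInputIdentityOperator extends AbstractStreamOperator<String>
        implements TwoInputStreamOperator<Tuple2<String, Integer>, Tuple2<String, Integer>, String> {

    @Override
    public void processElement1(StreamRecord<Tuple2<String, Integer>> element) {
        // Forward the first (keyed) input as its String representation, e.g. "(regular1,2)".
        output.collect(new StreamRecord<>(element.getValue().toString()));
    }

    @Override
    public void processElement2(StreamRecord<Tuple2<String, Integer>> element) {
        // Forward the second (non-keyed) input in the same way.
        output.collect(new StreamRecord<>(element.getValue().toString()));
    }
}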
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in project flink by apache.
From the class StateDescriptorPassingTest, method testReduceWindowState:
@Test
public void testReduceWindowState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    DataStream<File> src =
            env.fromElements(new File("/"))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result =
            src.keyBy(new KeySelector<File, String>() {
                @Override
                public String getKey(File value) {
                    return null;
                }
            }).window(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
                    .reduce(new ReduceFunction<File>() {
                        @Override
                        public File reduce(File value1, File value2) {
                            return null;
                        }
                    });
    validateStateDescriptorConfigured(result);
}
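The watermark strategy assigned to src is repeated verbatim in each of the StateDescriptorPassingTest methods below. A small sketch of how it could be pulled into a shared helper (the helper name is hypothetical and introduced here only for illustration):

// Hypothetical helper, not part of StateDescriptorPassingTest: builds the same
// monotonous-timestamps strategy, assigning the current wall-clock time as the
// event timestamp to simulate ingestion time.
private static WatermarkStrategy<File> ingestionTimeLikeStrategy() {
    return WatermarkStrategy.<File>forMonotonousTimestamps()
            .withTimestampAssigner((file, ts) -> System.currentTimeMillis());
}

Each test could then call assignTimestampsAndWatermarks(ingestionTimeLikeStrategy()) instead of repeating the builder chain.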
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in project flink by apache.
From the class StateDescriptorPassingTest, method testApplyWindowAllState:
@Test
public void testApplyWindowAllState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    // simulate ingestion time
    DataStream<File> src =
            env.fromElements(new File("/"))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result =
            src.windowAll(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
                    .apply(new AllWindowFunction<File, String, TimeWindow>() {
                        @Override
                        public void apply(TimeWindow window, Iterable<File> input, Collector<String> out) {
                        }
                    });
    validateListStateDescriptorConfigured(result);
}
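The AllWindowFunction above is left empty because the test only checks the configured list state descriptor. For reference, a hedged sketch of how such a function could emit output per window (the variable name and message format are illustrative assumptions):

// Illustrative only: emits one record per window with the window end and the
// number of files collected in it.
AllWindowFunction<File, String, TimeWindow> countPerWindow =
        new AllWindowFunction<File, String, TimeWindow>() {
            @Override
            public void apply(TimeWindow window, Iterable<File> input, Collector<String> out) {
                int count = 0;
                for (File ignored : input) {
                    count++;
                }
                out.collect("window ending at " + window.getEnd() + " contained " + count + " files");
            }
        };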
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in project flink by apache.
From the class StateDescriptorPassingTest, method testApplyWindowState:
@Test
public void testApplyWindowState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    DataStream<File> src =
            env.fromElements(new File("/"))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result =
            src.keyBy(new KeySelector<File, String>() {
                @Override
                public String getKey(File value) {
                    return null;
                }
            }).window(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
                    .apply(new WindowFunction<File, String, String, TimeWindow>() {
                        @Override
                        public void apply(String s, TimeWindow window, Iterable<File> input, Collector<String> out) {
                        }
                    });
    validateListStateDescriptorConfigured(result);
}
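Similarly, the keyed WindowFunction above has an intentionally empty body. A sketch of a variant that emits a per-key element count, purely for illustration (names and output format are assumptions):

// Illustrative only: emits "<key> -> <count>" for every key and window.
WindowFunction<File, String, String, TimeWindow> countPerKey =
        new WindowFunction<File, String, String, TimeWindow>() {
            @Override
            public void apply(String key, TimeWindow window, Iterable<File> input, Collector<String> out) {
                int count = 0;
                for (File ignored : input) {
                    count++;
                }
                out.collect(key + " -> " + count);
            }
        };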
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in project flink by apache.
From the class StateDescriptorPassingTest, method testProcessWindowState:
@Test
public void testProcessWindowState() {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.registerTypeWithKryoSerializer(File.class, JavaSerializer.class);
    DataStream<File> src =
            env.fromElements(new File("/"))
                    .assignTimestampsAndWatermarks(WatermarkStrategy.<File>forMonotonousTimestamps()
                            .withTimestampAssigner((file, ts) -> System.currentTimeMillis()));
    SingleOutputStreamOperator<?> result =
            src.keyBy(new KeySelector<File, String>() {
                @Override
                public String getKey(File value) {
                    return null;
                }
            }).window(TumblingEventTimeWindows.of(Time.milliseconds(1000)))
                    .process(new ProcessWindowFunction<File, String, String, TimeWindow>() {
                        @Override
                        public void process(String s, Context ctx, Iterable<File> input, Collector<String> out) {
                        }
                    });
    validateListStateDescriptorConfigured(result);
}
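The ProcessWindowFunction above is likewise left empty; its Context parameter is what distinguishes it from the plain WindowFunction. A sketch, assuming one only wants to emit the window bounds per key (the variable name and output format are illustrative):

// Illustrative only: uses the Context to read the window bounds for each key.
ProcessWindowFunction<File, String, String, TimeWindow> describeWindow =
        new ProcessWindowFunction<File, String, String, TimeWindow>() {
            @Override
            public void process(String key, Context ctx, Iterable<File> input, Collector<String> out) {
                out.collect(key + ": [" + ctx.window().getStart() + ", " + ctx.window().getEnd() + ")");
            }
        };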