Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in the apache/flink project, from the class DataSetSavepointWindowReaderITCase, method testReduceEvictorWindowStateReader.
@Test
public void testReduceEvictorWindowStateReader() throws Exception {
    // Streaming job: write evictor-backed reduce() window state into a savepoint.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStateBackend(getStateBackend());
    env.setParallelism(4);

    // All events get timestamp 0 so every element lands in the same 10 ms window.
    WatermarkStrategy<Integer> watermarks =
            WatermarkStrategy.<Integer>noWatermarks()
                    .withTimestampAssigner((event, timestamp) -> 0);

    env.addSource(createSource(numbers))
            .rebalance()
            .assignTimestampsAndWatermarks(watermarks)
            .keyBy(id -> id)
            .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
            .evictor(new NoOpEvictor<>())
            .reduce(new ReduceSum())
            .uid(uid)
            .addSink(new DiscardingSink<>());

    String savepointPath = takeSavepoint(env);

    // Batch job: read the evictor window state back via the state-processor API.
    ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
    ExistingSavepoint savepoint = Savepoint.load(batchEnv, savepointPath, getStateBackend());

    List<Integer> results =
            savepoint
                    .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
                    .evictor()
                    .reduce(uid, new ReduceSum(), Types.INT, Types.INT)
                    .collect();

    Assert.assertThat(
            "Unexpected results from keyed state",
            results,
            Matchers.containsInAnyOrder(numbers));
}
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in the apache/flink project, from the class DataSetSavepointWindowReaderITCase, method testProcessEvictorWindowStateReader.
@Test
public void testProcessEvictorWindowStateReader() throws Exception {
    // Streaming job: write evictor-backed process() window state into a savepoint.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStateBackend(getStateBackend());
    env.setParallelism(4);

    // All events get timestamp 0 so every element lands in the same 10 ms window.
    WatermarkStrategy<Integer> watermarks =
            WatermarkStrategy.<Integer>noWatermarks()
                    .withTimestampAssigner((event, timestamp) -> 0);

    env.addSource(createSource(numbers))
            .rebalance()
            .assignTimestampsAndWatermarks(watermarks)
            .keyBy(id -> id)
            .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
            .evictor(new NoOpEvictor<>())
            .process(new NoOpProcessWindowFunction())
            .uid(uid)
            .addSink(new DiscardingSink<>());

    String savepointPath = takeSavepoint(env);

    // Batch job: read the evictor window state back via the state-processor API.
    ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
    ExistingSavepoint savepoint = Savepoint.load(batchEnv, savepointPath, getStateBackend());

    List<Integer> results =
            savepoint
                    .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
                    .evictor()
                    .process(uid, new BasicReaderFunction(), Types.INT, Types.INT, Types.INT)
                    .collect();

    Assert.assertThat(
            "Unexpected results from keyed state",
            results,
            Matchers.containsInAnyOrder(numbers));
}
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in the apache/flink project, from the class DataSetSavepointWindowReaderITCase, method testApplyEvictorWindowStateReader.
@Test
public void testApplyEvictorWindowStateReader() throws Exception {
    // Streaming job: write evictor-backed apply() window state into a savepoint.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStateBackend(getStateBackend());
    env.setParallelism(4);

    // All events get timestamp 0 so every element lands in the same 10 ms window.
    env.addSource(createSource(numbers))
            .rebalance()
            .assignTimestampsAndWatermarks(
                    WatermarkStrategy.<Integer>noWatermarks()
                            .withTimestampAssigner((event, timestamp) -> 0))
            .keyBy(id -> id)
            .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
            .evictor(new NoOpEvictor<>())
            .apply(new NoOpWindowFunction())
            .uid(uid)
            .addSink(new DiscardingSink<>());

    String savepointPath = takeSavepoint(env);

    // Batch job: read the evictor window state back via the state-processor API.
    ExecutionEnvironment batchEnv = ExecutionEnvironment.getExecutionEnvironment();
    ExistingSavepoint savepoint = Savepoint.load(batchEnv, savepointPath, getStateBackend());

    // NOTE(review): the original passed Time.milliseconds(1) here, which did not
    // match the 10 ms window used by the writing job or the sibling tests. The
    // reader's assigner presumably only selects the window type, so 1 ms may have
    // worked by accident, but aligning it with the producer is the intended
    // contract — confirm against the state-processor API docs.
    List<Integer> results =
            savepoint
                    .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
                    .evictor()
                    .process(uid, new BasicReaderFunction(), Types.INT, Types.INT, Types.INT)
                    .collect();

    Assert.assertThat(
            "Unexpected results from keyed state",
            results,
            Matchers.containsInAnyOrder(numbers));
}
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in the apache/flink project, from the class SavepointWindowReaderITCase, method testReduceEvictorWindowStateReader.
@Test
public void testReduceEvictorWindowStateReader() throws Exception {
    // Streaming job: write evictor-backed reduce() window state into a savepoint.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStateBackend(getStateBackend());
    env.setParallelism(4);

    // All events get timestamp 0 so every element lands in the same 10 ms window.
    WatermarkStrategy<Integer> watermarks =
            WatermarkStrategy.<Integer>noWatermarks()
                    .withTimestampAssigner((event, timestamp) -> 0);

    env.addSource(createSource(numbers))
            .rebalance()
            .assignTimestampsAndWatermarks(watermarks)
            .keyBy(id -> id)
            .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
            .evictor(new NoOpEvictor<>())
            .reduce(new ReduceSum())
            .uid(uid)
            .addSink(new DiscardingSink<>());

    String savepointPath = takeSavepoint(env);

    // Read the evictor window state back with the unified SavepointReader API.
    SavepointReader savepoint = SavepointReader.read(env, savepointPath, getStateBackend());
    List<Integer> results =
            JobResultRetriever.collect(
                    savepoint
                            .window(TumblingEventTimeWindows.of(Time.milliseconds(10)))
                            .evictor()
                            .reduce(uid, new ReduceSum(), Types.INT, Types.INT));

    Assert.assertThat(
            "Unexpected results from keyed state",
            results,
            Matchers.containsInAnyOrder(numbers));
}
Use of org.apache.flink.api.common.eventtime.WatermarkStrategy in the apache/flink project, from the class DataStreamTest, method testErgonomicWatermarkStrategy.
/**
 * Ensure that WatermarkStrategy is easy to use in the API, without superfluous generics.
 *
 * <p>This is a compile-time ergonomics check: the assertions are that both calls below
 * type-check, with an explicit generic argument required only when the strategy is built
 * from a chain of method calls.
 */
@Test
public void testErgonomicWatermarkStrategy() {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<String> input = env.fromElements("bonjour");
// a single factory call needs no explicit generic type argument — inference handles it
input.assignTimestampsAndWatermarks(WatermarkStrategy.forBoundedOutOfOrderness(Duration.ofMillis(10)));
// but as soon as you chain methods onto the factory, the first call must specify the generic type
input.assignTimestampsAndWatermarks(WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofMillis(10)).withTimestampAssigner((event, timestamp) -> 42L));
}
Aggregations