Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
From class WindowOperatorTest, method testReduceSessionWindows:
@Test
@SuppressWarnings("unchecked")
public void testReduceSessionWindows() throws Exception {
closeCalled.set(0);
final int sessionSize = 3;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc = new ReducingStateDescriptor<>("window-contents", new SumReducer(), STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));
WindowOperator<String, Tuple2<String, Integer>, Tuple2<String, Integer>, Tuple3<String, Long, Long>, TimeWindow> operator = new WindowOperator<>(EventTimeSessionWindows.withGap(Time.seconds(sessionSize)), new TimeWindow.Serializer(), new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()), stateDesc, new InternalSingleValueWindowFunction<>(new ReducedSessionWindowFunction()), EventTimeTrigger.create(), 0, null);
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> testHarness = createTestHarness(operator);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
// do a snapshot, close and restore again
OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
testHarness = createTestHarness(operator);
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
testHarness.processWatermark(new Watermark(12000));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
expectedOutput.add(new Watermark(12000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
testHarness.processWatermark(new Watermark(17999));
expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
expectedOutput.add(new Watermark(17999));
TestHarnessUtil.assertOutputEqualsSorted("Output was not correct.", expectedOutput, testHarness.getOutput(), new Tuple3ResultSortComparator());
testHarness.close();
}
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
From class WindowOperatorTest, method testSessionWindows:
// Verifies event-time session windows backed by a list state across a snapshot/restore
// cycle: the snapshot is taken mid-stream (before any window fires), the operator is
// rebuilt and restored, and the merged session results are asserted after watermarks.
@Test
@SuppressWarnings("unchecked")
public void testSessionWindows() throws Exception {
    closeCalled.set(0);

    final int gapSeconds = 3;

    ListStateDescriptor<Tuple2<String, Integer>> windowContentsDescriptor =
            new ListStateDescriptor<>(
                    "window-contents", STRING_INT_TUPLE.createSerializer(new ExecutionConfig()));

    WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple3<String, Long, Long>, TimeWindow> windowOperator =
            new WindowOperator<>(
                    EventTimeSessionWindows.withGap(Time.seconds(gapSeconds)),
                    new TimeWindow.Serializer(),
                    new TupleKeySelector(),
                    BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
                    windowContentsDescriptor,
                    new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                    EventTimeTrigger.create(),
                    0,
                    null);

    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>> harness =
            createTestHarness(windowOperator);
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    harness.open();

    // Feed out-of-order elements for both keys before taking the snapshot.
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));

    // Snapshot, verify nothing has been emitted yet, then restore into a fresh harness.
    OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, harness.getOutput(), new Tuple3ResultSortComparator());
    harness.close();
    harness = createTestHarness(windowOperator);
    harness.setup();
    harness.initializeState(snapshot);
    harness.open();

    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));

    // Advancing the watermark past the session gaps fires the merged session windows.
    harness.processWatermark(new Watermark(12000));
    expected.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
    expected.add(new Watermark(12000));

    // A later key2 session, fired by the second watermark.
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
    harness.processWatermark(new Watermark(17999));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
    expected.add(new Watermark(17999));

    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, harness.getOutput(), new Tuple3ResultSortComparator());
    harness.close();
}
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
From class SinkWriterOperatorTest, method testStateRestore:
// Verifies snapshot/restore of a buffering sink writer in both stateful and stateless
// modes: buffered elements are snapshotted, a fresh operator is restored from that
// snapshot, and endInput must flush the restored committables only in the stateful case.
@ParameterizedTest
@ValueSource(booleans = { true, false })
void testStateRestore(boolean stateful) throws Exception {
    final long initialTime = 0;
    final SnapshottingBufferingSinkWriter writer = new SnapshottingBufferingSinkWriter();
    final OneInputStreamOperatorTestHarness<Integer, CommittableMessage<Integer>> harness =
            createTestHarnessWithBufferingSinkWriter(writer, stateful);
    harness.open();

    harness.processWatermark(initialTime);
    harness.processElement(1, initialTime + 1);
    harness.processElement(2, initialTime + 2);
    harness.prepareSnapshotPreBarrier(1L);
    OperatorSubtaskState snapshot = harness.snapshot(1L, 1L);

    // we see the watermark and the committable summary, so the committables must be stored in
    // state
    assertThat(harness.getOutput()).hasSize(2).contains(new Watermark(initialTime));
    assertThat(writer.lastCheckpointId)
            .isEqualTo(stateful ? 1L : SnapshottingBufferingSinkWriter.NOT_SNAPSHOTTED);
    harness.close();

    final OneInputStreamOperatorTestHarness<Integer, CommittableMessage<Integer>> restored =
            createTestHarnessWithBufferingSinkWriter(new SnapshottingBufferingSinkWriter(), stateful);
    restored.initializeState(snapshot);
    restored.open();

    // this will flush out the committables that were restored
    restored.endInput();
    final long checkpointId = 2;
    restored.prepareSnapshotPreBarrier(checkpointId);

    if (stateful) {
        assertBasicOutput(restored.getOutput(), 2, checkpointId);
    } else {
        // Without state, nothing was restored: the summary must report zero committables.
        assertThat(fromOutput(restored.getOutput()).get(0).asRecord().getValue())
                .isInstanceOf(CommittableSummary.class)
                .satisfies(cs -> SinkV2Assertions.assertThat((CommittableSummary<?>) cs)
                        .hasOverallCommittables(0)
                        .hasPendingCommittables(0)
                        .hasFailedCommittables(0));
    }
    restored.close();
}
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
From class TaskLocalStateStoreImplTest, method storeStates:
/**
 * Stores {@code count} snapshots in the local state store, one per checkpoint id
 * {@code 0..count-1}, each holding a single empty OperatorSubtaskState under a fresh
 * OperatorID.
 *
 * @param count number of snapshots to create and store
 * @return the stored snapshots in checkpoint-id order
 */
private List<TestingTaskStateSnapshot> storeStates(int count) {
    final List<TestingTaskStateSnapshot> stored = new ArrayList<>(count);
    for (int checkpointId = 0; checkpointId < count; checkpointId++) {
        TestingTaskStateSnapshot snapshot = new TestingTaskStateSnapshot();
        // Each snapshot carries one (empty) subtask state keyed by a brand-new operator id.
        snapshot.putSubtaskStateByOperatorID(new OperatorID(), OperatorSubtaskState.builder().build());
        taskLocalStateStore.storeLocalState(checkpointId, snapshot);
        stored.add(snapshot);
    }
    return stored;
}
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
From class TestTaskStateManager, method prioritizedOperatorState:
/**
 * Builds the prioritized restored state for one operator: the job-manager snapshot is
 * authoritative, and a matching task-manager (local) state, when present, is offered as
 * an alternative. Returns an empty, not-restored result when the JM snapshot or the
 * operator's entry in it is missing.
 *
 * @param operatorID the operator whose state is looked up
 * @return the prioritized view of this operator's subtask state, never {@code null}
 */
@Nonnull
@Override
public PrioritizedOperatorSubtaskState prioritizedOperatorState(OperatorID operatorID) {
    TaskStateSnapshot jmSnapshot = getLastJobManagerTaskStateSnapshot();
    TaskStateSnapshot tmSnapshot = getLastTaskManagerTaskStateSnapshot();

    // Without a JM snapshot there is nothing to restore from.
    if (jmSnapshot == null) {
        return PrioritizedOperatorSubtaskState.emptyNotRestored();
    }
    OperatorSubtaskState jmState = jmSnapshot.getSubtaskStateByOperatorID(operatorID);
    if (jmState == null) {
        return PrioritizedOperatorSubtaskState.emptyNotRestored();
    }

    // Local (TM) state is optional; include it only when present for this operator.
    List<OperatorSubtaskState> localAlternatives = Collections.emptyList();
    if (tmSnapshot != null) {
        OperatorSubtaskState tmState = tmSnapshot.getSubtaskStateByOperatorID(operatorID);
        if (tmState != null) {
            localAlternatives = Collections.singletonList(tmState);
        }
    }

    return new PrioritizedOperatorSubtaskState.Builder(jmState, localAlternatives, reportedCheckpointId)
            .build();
}
Aggregations