Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
Class TwoPhaseCommitSinkFunctionTest, method testRecoverFromStateAfterFinished: the sink is snapshotted after finish() and restored into a fresh harness without aborting any transactions.
@Test
public void testRecoverFromStateAfterFinished() throws Exception {
    harness.open();
    harness.processElement("42", 0);
    sinkFunction.finish();

    // Snapshot the sink after finish(), then rebuild the harness and restore from that state.
    OperatorSubtaskState operatorSubtaskState = harness.snapshot(2, 5);
    closeTestHarness();

    setUpTestHarness();
    harness.initializeState(operatorSubtaskState);
    harness.open();

    // Recovering from a post-finish snapshot must not abort any transactions.
    assertEquals(0, sinkFunction.abortedTransactions.size());
}
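The snippet above relies on harness and sinkFunction fields prepared by setUpTestHarness() and closeTestHarness(), which are not shown. Below is a minimal, self-contained sketch of what such a setup could look like, assuming a trivial TwoPhaseCommitSinkFunction subclass (the class name RecordingTwoPhaseSink is invented here) that only records aborted transactions; the real test's sink and helpers may differ.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.common.typeutils.base.VoidSerializer;
import org.apache.flink.streaming.api.functions.sink.TwoPhaseCommitSinkFunction;
import org.apache.flink.streaming.api.operators.StreamSink;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;

class TwoPhaseCommitHarnessSketch {

    // Hypothetical sink: transactions are plain strings and no external system is touched.
    static class RecordingTwoPhaseSink extends TwoPhaseCommitSinkFunction<String, String, Void> {
        final List<String> abortedTransactions = new ArrayList<>();

        RecordingTwoPhaseSink() {
            super(StringSerializer.INSTANCE, VoidSerializer.INSTANCE);
        }

        @Override
        protected String beginTransaction() {
            return "txn-" + System.nanoTime();
        }

        @Override
        protected void invoke(String transaction, String value, Context context) {}

        @Override
        protected void preCommit(String transaction) {}

        @Override
        protected void commit(String transaction) {}

        @Override
        protected void abort(String transaction) {
            abortedTransactions.add(transaction);
        }
    }

    private RecordingTwoPhaseSink sinkFunction;
    private OneInputStreamOperatorTestHarness<String, Object> harness;

    // Roughly what setUpTestHarness() is assumed to do: wrap the sink in a StreamSink
    // operator and build a fresh one-input test harness around it.
    void setUpTestHarness() throws Exception {
        sinkFunction = new RecordingTwoPhaseSink();
        harness = new OneInputStreamOperatorTestHarness<>(
                new StreamSink<>(sinkFunction), StringSerializer.INSTANCE);
        harness.setup();
    }

    void closeTestHarness() throws Exception {
        harness.close();
    }
}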
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
Class GlobalCommitterOperatorTest, method testStateRestore: a pending committable is snapshotted before the checkpoint completes and is committed once the state is restored into a new harness.
@Test
void testStateRestore() throws Exception {
    final MockCommitter committer = new MockCommitter();
    final OneInputStreamOperatorTestHarness<CommittableMessage<Integer>, Void> testHarness =
            createTestHarness(committer);
    testHarness.open();

    // A committable summary followed by the matching committable.
    final CommittableSummary<Integer> committableSummary =
            new CommittableSummary<>(1, 1, 0L, 1, 1, 0);
    testHarness.processElement(new StreamRecord<>(committableSummary));
    final CommittableWithLineage<Integer> first = new CommittableWithLineage<>(1, 0L, 1);
    testHarness.processElement(new StreamRecord<>(first));

    // Snapshot before the checkpoint is notified as complete, so nothing has been committed yet.
    final OperatorSubtaskState snapshot = testHarness.snapshot(0L, 2L);
    assertThat(testHarness.getOutput()).isEmpty();
    testHarness.close();
    assertThat(committer.committed).isEmpty();

    // Restoring the snapshot into a fresh harness triggers the pending commit.
    final OneInputStreamOperatorTestHarness<CommittableMessage<Integer>, Void> restored =
            createTestHarness(committer);
    restored.initializeState(snapshot);
    restored.open();

    assertThat(restored.getOutput()).isEmpty();
    assertThat(committer.committed).containsExactly(1);
    restored.close();
}
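MockCommitter is a test helper that is not shown above; judging by the assertions, it simply records what gets committed. A hedged sketch of such a recorder, assuming Flink's org.apache.flink.api.connector.sink2.Committer interface (the class name RecordingCommitter is made up here, and the real MockCommitter may do more):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.flink.api.connector.sink2.Committer;

// Records every committable it is asked to commit so tests can assert on `committed`.
class RecordingCommitter implements Committer<Integer> {
    final List<Integer> committed = new ArrayList<>();

    @Override
    public void commit(Collection<CommitRequest<Integer>> requests) {
        for (CommitRequest<Integer> request : requests) {
            committed.add(request.getCommittable());
        }
    }

    @Override
    public void close() {}
}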
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project flink by apache.
Class RestoreStreamTaskTest, method testRestoreAfterScaleUp: an operator chain is restored with an empty OperatorSubtaskState standing in for the head operator's state, as happens for new subtasks after scale-up.
@Test
public void testRestoreAfterScaleUp() throws Exception {
    OperatorID headOperatorID = new OperatorID(42L, 42L);
    OperatorID tailOperatorID = new OperatorID(44L, 44L);
    JobManagerTaskRestore restore = createRunAndCheckpointOperatorChain(
            headOperatorID, new CounterOperator(), tailOperatorID, new CounterOperator(), Optional.empty());
    TaskStateSnapshot stateHandles = restore.getTaskStateSnapshot();
    assertEquals(2, stateHandles.getSubtaskStateMappings().size());

    // test empty state in case of scale up
    OperatorSubtaskState emptyHeadOperatorState = OperatorSubtaskState.builder().build();
    stateHandles.putSubtaskStateByOperatorID(headOperatorID, emptyHeadOperatorState);

    createRunAndCheckpointOperatorChain(
            headOperatorID, new CounterOperator(), tailOperatorID, new CounterOperator(), Optional.of(restore));

    assertEquals(new HashSet<>(Arrays.asList(headOperatorID, tailOperatorID)), RESTORED_OPERATORS.keySet());
    assertThat(new HashSet<>(RESTORED_OPERATORS.values()), contains(restore.getRestoreCheckpointId()));
}
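For context, the empty-state bookkeeping this test performs can be reproduced in isolation. The following is a minimal sketch using only the snapshot classes themselves; these are runtime-internal APIs, so constructor shapes may vary between Flink versions, and the checkpoint id 1L is arbitrary.

import org.apache.flink.runtime.checkpoint.JobManagerTaskRestore;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.jobgraph.OperatorID;

public class EmptySubtaskStateSketch {
    public static void main(String[] args) {
        OperatorID headOperatorID = new OperatorID(42L, 42L);

        // An OperatorSubtaskState built without any handles models an operator that has
        // nothing to restore, which is what a newly added subtask sees after scale-up.
        OperatorSubtaskState empty = OperatorSubtaskState.builder().build();

        TaskStateSnapshot stateHandles = new TaskStateSnapshot();
        stateHandles.putSubtaskStateByOperatorID(headOperatorID, empty);

        // JobManagerTaskRestore carries the snapshot plus the checkpoint id to restore from.
        JobManagerTaskRestore restore = new JobManagerTaskRestore(1L, stateHandles);
        System.out.println(restore.getTaskStateSnapshot().getSubtaskStateMappings().size()); // 1
    }
}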
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project beam by apache.
Class WindowDoFnOperatorTest, method testRestore: windowed aggregation state is snapshotted mid-window and restored before the window fires.
@Test
public void testRestore() throws Exception {
    // test harness
    KeyedOneInputStreamOperatorTestHarness<ByteBuffer, WindowedValue<KeyedWorkItem<Long, Long>>, WindowedValue<KV<Long, Long>>>
            testHarness = createTestHarness(getWindowDoFnOperator());
    testHarness.open();

    // process elements
    IntervalWindow window = new IntervalWindow(new Instant(0), Duration.millis(10_000));
    testHarness.processWatermark(0L);
    testHarness.processElement(
            Item.builder().key(1L).timestamp(1L).value(100L).window(window).build().toStreamRecord());
    testHarness.processElement(
            Item.builder().key(1L).timestamp(2L).value(20L).window(window).build().toStreamRecord());
    testHarness.processElement(
            Item.builder().key(2L).timestamp(3L).value(77L).window(window).build().toStreamRecord());

    // create snapshot
    OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
    testHarness.close();

    // restore from the snapshot
    testHarness = createTestHarness(getWindowDoFnOperator());
    testHarness.initializeState(snapshot);
    testHarness.open();

    // close window
    testHarness.processWatermark(10_000L);

    Iterable<WindowedValue<KV<Long, Long>>> output =
            stripStreamRecordFromWindowedValue(testHarness.getOutput());
    assertEquals(2, Iterables.size(output));
    assertThat(
            output,
            containsInAnyOrder(
                    WindowedValue.of(
                            KV.of(1L, 120L), new Instant(9_999), window, PaneInfo.createPane(true, true, ON_TIME)),
                    WindowedValue.of(
                            KV.of(2L, 77L), new Instant(9_999), window, PaneInfo.createPane(true, true, ON_TIME))));

    // cleanup
    testHarness.close();
}
Use of org.apache.flink.runtime.checkpoint.OperatorSubtaskState in project beam by apache.
Class DoFnOperatorTest, method sideInputCheckpointing: side inputs pushed before the snapshot are still matched to main-input elements processed after the restore.
void sideInputCheckpointing(
        TestHarnessFactory<TwoInputStreamOperatorTestHarness<WindowedValue<String>, RawUnionValue, WindowedValue<String>>> harnessFactory)
        throws Exception {
    TwoInputStreamOperatorTestHarness<WindowedValue<String>, RawUnionValue, WindowedValue<String>> testHarness =
            harnessFactory.create();
    testHarness.open();

    IntervalWindow firstWindow = new IntervalWindow(new Instant(0), new Instant(100));
    IntervalWindow secondWindow = new IntervalWindow(new Instant(0), new Instant(500));

    // push in some side inputs for both windows
    testHarness.processElement2(new StreamRecord<>(new RawUnionValue(1,
            valuesInWindow(
                    PCollectionViewTesting.materializeValuesFor(
                            view1.getPipeline().getOptions(), View.asIterable(), "hello", "ciao"),
                    new Instant(0), firstWindow))));
    testHarness.processElement2(new StreamRecord<>(new RawUnionValue(2,
            valuesInWindow(
                    PCollectionViewTesting.materializeValuesFor(
                            view2.getPipeline().getOptions(), View.asIterable(), "foo", "bar"),
                    new Instant(0), secondWindow))));

    // snapshot state, throw away the operator, then restore and verify that we still match
    // main-input elements to the side-inputs that we sent earlier
    OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);

    testHarness = harnessFactory.create();
    testHarness.initializeState(snapshot);
    testHarness.open();

    // push in main-input elements
    WindowedValue<String> helloElement = valueInWindow("Hello", new Instant(0), firstWindow);
    WindowedValue<String> worldElement = valueInWindow("World", new Instant(1000), firstWindow);
    testHarness.processElement1(new StreamRecord<>(helloElement));
    testHarness.processElement1(new StreamRecord<>(worldElement));

    assertThat(stripStreamRecordFromWindowedValue(testHarness.getOutput()), contains(helloElement, worldElement));

    testHarness.close();
}
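All five examples follow the same round trip: process some elements, call snapshot() on the harness to obtain an OperatorSubtaskState, discard the harness, then hand the snapshot to a fresh harness via initializeState() before open(). As a stand-alone illustration of that pattern outside any particular project, here is a hedged sketch built around a simple keyed summing function; SummingFunction, the key selector, and the checkpoint id/timestamp values are invented for this example.

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.operators.KeyedProcessOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.util.Collector;

public class SnapshotRestoreRoundTripSketch {

    // Keeps a running sum per key in keyed ValueState and emits the updated sum.
    static class SummingFunction extends KeyedProcessFunction<Long, Long, Long> {
        private transient ValueState<Long> sum;

        @Override
        public void processElement(Long value, Context ctx, Collector<Long> out) throws Exception {
            if (sum == null) {
                // Obtain the state handle lazily so the sketch does not depend on a
                // particular RichFunction.open(...) signature.
                sum = getRuntimeContext().getState(new ValueStateDescriptor<>("sum", Types.LONG));
            }
            long updated = (sum.value() == null ? 0L : sum.value()) + value;
            sum.update(updated);
            out.collect(updated);
        }
    }

    static KeyedOneInputStreamOperatorTestHarness<Long, Long, Long> createHarness() throws Exception {
        return new KeyedOneInputStreamOperatorTestHarness<>(
                new KeyedProcessOperator<>(new SummingFunction()),
                value -> value % 2L,  // key selector: even vs. odd values
                Types.LONG);
    }

    public static void main(String[] args) throws Exception {
        KeyedOneInputStreamOperatorTestHarness<Long, Long, Long> harness = createHarness();
        harness.open();
        harness.processElement(new StreamRecord<>(2L, 1L));  // sum for key 0 -> 2
        harness.processElement(new StreamRecord<>(4L, 2L));  // sum for key 0 -> 6

        // Capture the keyed state and throw the harness away.
        OperatorSubtaskState snapshot = harness.snapshot(0L, 0L);
        harness.close();

        // A fresh harness restores that state before open(), so processing continues
        // where the snapshot left off.
        harness = createHarness();
        harness.initializeState(snapshot);
        harness.open();
        harness.processElement(new StreamRecord<>(6L, 3L));  // sum for key 0 -> 12
        harness.close();
    }
}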