Use of org.apache.flink.runtime.checkpoint.OperatorState in project flink by apache.
The class MetadataV2Serializer, method deserializeOperatorState.
@Override
protected OperatorState deserializeOperatorState(
        DataInputStream dis, @Nullable DeserializationContext context) throws IOException {
    final OperatorID jobVertexId = new OperatorID(dis.readLong(), dis.readLong());
    final int parallelism = dis.readInt();
    final int maxParallelism = dis.readInt();

    // this field was "chain length" before Flink 1.3, and it is still part
    // of the format, despite being unused
    dis.readInt();

    // create the operator state container
    final OperatorState taskState = new OperatorState(jobVertexId, parallelism, maxParallelism);

    // deserialize the per-subtask states, each prefixed by its subtask index
    final int numSubTaskStates = dis.readInt();
    for (int j = 0; j < numSubTaskStates; j++) {
        final int subtaskIndex = dis.readInt();
        final OperatorSubtaskState subtaskState = deserializeSubtaskState(dis, context);
        taskState.putState(subtaskIndex, subtaskState);
    }
    return taskState;
}
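The write side of this format mirrors these reads field for field. Below is a minimal sketch of what such a serializer could look like, not Flink's actual serializeOperatorState; the accessors used and the helper writeSubtaskState are assumptions for illustration.

// Hypothetical mirror of the read path above, not the actual Flink code.
// getLowerPart()/getUpperPart() and writeSubtaskState are assumptions.
protected void serializeOperatorState(OperatorState state, DataOutputStream dos) throws IOException {
    // the OperatorID is two longs, matching the two readLong() calls above
    dos.writeLong(state.getOperatorID().getLowerPart());
    dos.writeLong(state.getOperatorID().getUpperPart());
    dos.writeInt(state.getParallelism());
    dos.writeInt(state.getMaxParallelism());
    // legacy "chain length" field; readers ignore the value
    dos.writeInt(1);
    // per-subtask states, each prefixed by its subtask index
    dos.writeInt(state.getNumberCollectedStates());
    for (Map.Entry<Integer, OperatorSubtaskState> entry : state.getSubtaskStates().entrySet()) {
        dos.writeInt(entry.getKey());
        writeSubtaskState(entry.getValue(), dos);
    }
}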
Use of org.apache.flink.runtime.checkpoint.OperatorState in project flink by apache.
The class CheckpointTestUtils, method createOperatorStates.
/**
 * Creates a random collection of OperatorState objects containing various types of state
 * handles.
 *
 * @param random Source of randomness for the generated state handles.
 * @param basePath The base path for a savepoint; null for a checkpoint.
 * @param numAllRunningTaskStates Number of task states whose subtasks are all running.
 * @param numPartlyFinishedTaskStates Number of task states belonging to partly finished vertices.
 * @param numFullyFinishedTaskStates Number of task states belonging to fully finished vertices.
 * @param numSubtasksPerTask Number of subtasks for each task.
 */
public static Collection<OperatorState> createOperatorStates(
        Random random,
        @Nullable String basePath,
        int numAllRunningTaskStates,
        int numPartlyFinishedTaskStates,
        int numFullyFinishedTaskStates,
        int numSubtasksPerTask) {
    List<OperatorState> taskStates =
            new ArrayList<>(
                    numAllRunningTaskStates
                            + numPartlyFinishedTaskStates
                            + numFullyFinishedTaskStates);
    for (int stateIdx = 0; stateIdx < numAllRunningTaskStates; ++stateIdx) {
        OperatorState taskState = new OperatorState(new OperatorID(), numSubtasksPerTask, 128);
        randomlySetCoordinatorState(taskState, random);
        randomlySetSubtaskState(
                taskState, IntStream.range(0, numSubtasksPerTask).toArray(), random, basePath);
        taskStates.add(taskState);
    }
    for (int stateIdx = 0; stateIdx < numPartlyFinishedTaskStates; ++stateIdx) {
        OperatorState taskState = new OperatorState(new OperatorID(), numSubtasksPerTask, 128);
        randomlySetCoordinatorState(taskState, random);
        // the first half of the subtasks get random state, the second half are marked finished
        randomlySetSubtaskState(
                taskState, IntStream.range(0, numSubtasksPerTask / 2).toArray(), random, basePath);
        IntStream.range(numSubtasksPerTask / 2, numSubtasksPerTask)
                .forEach(index -> taskState.putState(index, FinishedOperatorSubtaskState.INSTANCE));
        taskStates.add(taskState);
    }
    for (int stateIdx = 0; stateIdx < numFullyFinishedTaskStates; ++stateIdx) {
        taskStates.add(new FullyFinishedOperatorState(new OperatorID(), numSubtasksPerTask, 128));
    }
    return taskStates;
}
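A caller could exercise this utility as in the following sketch; the seed and the counts are arbitrary and only illustrate how the parameters map onto the three loops above.

// Illustrative usage only; the seed and counts are arbitrary.
Random random = new Random(42L);
Collection<OperatorState> states =
        CheckpointTestUtils.createOperatorStates(
                random,
                null, // null basePath simulates a checkpoint rather than a savepoint
                2,    // vertices with all subtasks running
                1,    // vertices with half of their subtasks finished
                1,    // vertices that are fully finished
                4);   // subtasks per task
// one OperatorState per requested vertex: 2 + 1 + 1
assert states.size() == 4;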
Use of org.apache.flink.runtime.checkpoint.OperatorState in project flink by apache.
The class BroadcastStateInputFormatTest, method testReadBroadcastState.
@Test
public void testReadBroadcastState() throws Exception {
    try (TwoInputStreamOperatorTestHarness<Void, Integer, Void> testHarness = getTestHarness()) {
        testHarness.open();
        testHarness.processElement2(new StreamRecord<>(1));
        testHarness.processElement2(new StreamRecord<>(2));
        testHarness.processElement2(new StreamRecord<>(3));

        OperatorSubtaskState subtaskState = testHarness.snapshot(0, 0);
        OperatorState state = new OperatorState(OperatorIDGenerator.fromUid("uid"), 1, 4);
        state.putState(0, subtaskState);

        OperatorStateInputSplit split =
                new OperatorStateInputSplit(subtaskState.getManagedOperatorState(), 0);
        BroadcastStateInputFormat<Integer, Integer> format =
                new BroadcastStateInputFormat<>(state, new Configuration(), null, descriptor);
        format.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0));
        format.open(split);

        Map<Integer, Integer> results = new HashMap<>(3);
        while (!format.reachedEnd()) {
            Tuple2<Integer, Integer> entry = format.nextRecord(null);
            results.put(entry.f0, entry.f1);
        }

        Map<Integer, Integer> expected = new HashMap<>(3);
        expected.put(1, 1);
        expected.put(2, 2);
        expected.put(3, 3);
        Assert.assertEquals(
                "Failed to read correct list state from state backend", expected, results);
    }
}
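The while loop above is the standard drain pattern for any Flink InputFormat whose split has been opened. Factored out, it could look like the sketch below; the helper name readAll is an assumption, the test simply inlines the loop.

// Generic drain loop for an InputFormat with an already-opened split.
// The name readAll is hypothetical; the test above does this inline.
private static <T> List<T> readAll(InputFormat<T, ?> format) throws IOException {
    List<T> records = new ArrayList<>();
    while (!format.reachedEnd()) {
        records.add(format.nextRecord(null));
    }
    return records;
}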
Use of org.apache.flink.runtime.checkpoint.OperatorState in project flink by apache.
The class KeyedStateInputFormatTest, method testReadTime.
@Test
public void testReadTime() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state =
            createOperatorSubtaskState(new KeyedProcessOperator<>(new StatefulFunctionWithTime()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format =
            new KeyedStateInputFormat<>(
                    operatorState,
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(new TimerReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    KeyedStateReaderFunction<Integer, Integer> userFunction = new TimerReaderFunction();
    List<Integer> data = readInputSplit(split, userFunction);
    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 1, 2, 2, 3, 3), data);
}
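TimerReaderFunction itself is not shown in this listing. The doubled keys in the expected result (1, 1, 2, 2, 3, 3) are consistent with a reader that emits each key once per registered timer domain; the following is a hedged reconstruction, assuming KeyedStateReaderFunction.Context exposes registeredEventTimeTimers() and registeredProcessingTimeTimers():

// Reconstruction for illustration; the real TimerReaderFunction in
// KeyedStateInputFormatTest may differ in detail.
static class TimerReaderFunction extends KeyedStateReaderFunction<Integer, Integer> {
    @Override
    public void readKey(Integer key, Context ctx, Collector<Integer> out) throws Exception {
        // one output per registered event-time timer ...
        for (long ignored : ctx.registeredEventTimeTimers()) {
            out.collect(key);
        }
        // ... and one per registered processing-time timer, so each key appears twice
        for (long ignored : ctx.registeredProcessingTimeTimers()) {
            out.collect(key);
        }
    }
}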
Use of org.apache.flink.runtime.checkpoint.OperatorState in project flink by apache.
The class KeyedStateInputFormatTest, method testReadState.
@Test
public void testReadState() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state =
            createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);

    KeyedStateInputFormat<?, ?, ?> format =
            new KeyedStateInputFormat<>(
                    operatorState,
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT));
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];

    KeyedStateReaderFunction<Integer, Integer> userFunction = new ReaderFunction();
    List<Integer> data = readInputSplit(split, userFunction);
    Assert.assertEquals("Incorrect data read from input split", Arrays.asList(1, 2, 3), data);
}
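Both tests delegate to a readInputSplit helper that this listing omits. A plausible sketch follows, reusing the open/read/close lifecycle seen in the broadcast test above; the exact OperatorState it builds and the final sort are assumptions:

// Hedged sketch of the omitted helper; details are assumptions.
private static List<Integer> readInputSplit(
        KeyGroupRangeInputSplit split,
        KeyedStateReaderFunction<Integer, Integer> userFunction) throws IOException {
    KeyedStateInputFormat<Integer, VoidNamespace, Integer> format =
            new KeyedStateInputFormat<>(
                    new OperatorState(OperatorIDGenerator.fromUid("uid"), 1, 128),
                    new MemoryStateBackend(),
                    new Configuration(),
                    new KeyedStateReaderOperator<>(userFunction, Types.INT));
    format.setRuntimeContext(new MockStreamingRuntimeContext(false, 1, 0));
    format.openInputFormat();
    format.open(split);
    List<Integer> data = new ArrayList<>();
    while (!format.reachedEnd()) {
        data.add(format.nextRecord(null));
    }
    format.close();
    format.closeInputFormat();
    data.sort(Integer::compareTo); // deterministic order for the assertions
    return data;
}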