Use of org.apache.flink.migration.runtime.state.StateHandle in project flink by apache.
From the class SavepointV0Serializer, method convertSubtaskState:
private org.apache.flink.runtime.checkpoint.SubtaskState convertSubtaskState(SubtaskState subtaskState, int parallelInstanceIdx, ClassLoader userClassLoader, long checkpointID) throws Exception {

    // Deserialize the legacy handle that wraps the per-chain stream task states.
    SerializedValue<StateHandle<?>> serializedValue = subtaskState.getState();
    StreamTaskStateList stateList = (StreamTaskStateList) serializedValue.deserializeValue(userClassLoader);
    StreamTaskState[] streamTaskStates = stateList.getState(userClassLoader);

    List<StreamStateHandle> newChainStateList = Arrays.asList(new StreamStateHandle[streamTaskStates.length]);
    KeyGroupsStateHandle newKeyedState = null;

    for (int chainIdx = 0; chainIdx < streamTaskStates.length; ++chainIdx) {
        StreamTaskState streamTaskState = streamTaskStates[chainIdx];
        if (streamTaskState == null) {
            continue;
        }

        // Convert the operator/function state for this position in the operator chain.
        newChainStateList.set(chainIdx, convertOperatorAndFunctionState(streamTaskState));

        HashMap<String, KvStateSnapshot<?, ?, ?, ?>> oldKeyedState = streamTaskState.getKvStates();
        if (null != oldKeyedState) {
            Preconditions.checkState(null == newKeyedState, "Found more than one keyed state in chain");
            newKeyedState = convertKeyedBackendState(oldKeyedState, parallelInstanceIdx, checkpointID);
        }
    }

    ChainedStateHandle<StreamStateHandle> newChainedState = new ChainedStateHandle<>(newChainStateList);
    ChainedStateHandle<OperatorStateHandle> nopChain =
            new ChainedStateHandle<>(Arrays.asList(new OperatorStateHandle[newChainedState.getLength()]));

    return new org.apache.flink.runtime.checkpoint.SubtaskState(newChainedState, nopChain, nopChain, newKeyedState, null);
}
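The conversion above leans on org.apache.flink.util.SerializedValue to carry the legacy StateHandle through Java serialization until it can be restored against the user class loader. The minimal, self-contained sketch below illustrates that round trip with a plain String payload; the class name SerializedValueRoundTrip and the payload are made up for illustration, and this is not code from SavepointV0Serializer.

import org.apache.flink.util.SerializedValue;

public class SerializedValueRoundTrip {

    public static void main(String[] args) throws Exception {
        // Wrap an arbitrary serializable payload, just as the legacy savepoint code
        // wraps a StateHandle<?> inside a SerializedValue.
        SerializedValue<String> wrapped = new SerializedValue<>("legacy state payload");

        // Restore it with an explicit class loader, mirroring
        // serializedValue.deserializeValue(userClassLoader) in convertSubtaskState.
        String restored = wrapped.deserializeValue(SerializedValueRoundTrip.class.getClassLoader());

        System.out.println(restored);
    }
}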
Use of org.apache.flink.migration.runtime.state.StateHandle in project flink by apache.
From the class SavepointV0Serializer, method deserialize:
@Override
public SavepointV1 deserialize(DataInputStream dis, ClassLoader userClassLoader) throws IOException {
    long checkpointId = dis.readLong();

    // Task states
    int numTaskStates = dis.readInt();
    List<TaskState> taskStates = new ArrayList<>(numTaskStates);

    for (int i = 0; i < numTaskStates; i++) {
        JobVertexID jobVertexId = new JobVertexID(dis.readLong(), dis.readLong());
        int parallelism = dis.readInt();

        // Add task state
        TaskState taskState = new TaskState(jobVertexId, parallelism);
        taskStates.add(taskState);

        // Sub task states
        int numSubTaskStates = dis.readInt();
        for (int j = 0; j < numSubTaskStates; j++) {
            int subtaskIndex = dis.readInt();
            SerializedValue<StateHandle<?>> serializedValue = readSerializedValueStateHandle(dis);
            long stateSize = dis.readLong();
            long duration = dis.readLong();

            SubtaskState subtaskState = new SubtaskState(serializedValue, stateSize, duration);
            taskState.putState(subtaskIndex, subtaskState);
        }

        // Key group states
        int numKvStates = dis.readInt();
        for (int j = 0; j < numKvStates; j++) {
            int keyGroupIndex = dis.readInt();
            SerializedValue<StateHandle<?>> serializedValue = readSerializedValueStateHandle(dis);
            long stateSize = dis.readLong();
            long duration = dis.readLong();

            KeyGroupState keyGroupState = new KeyGroupState(serializedValue, stateSize, duration);
            taskState.putKvState(keyGroupIndex, keyGroupState);
        }
    }

    try {
        return convertSavepoint(taskStates, userClassLoader, checkpointId);
    } catch (Exception e) {
        throw new IOException(e);
    }
}
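For context, a legacy savepoint body is handed to this deserializer as a DataInputStream after the framework has already read the savepoint magic number and version header. The sketch below shows how such a call might look; SavepointV0Serializer.INSTANCE, the exact import locations, and the command-line path are assumptions made for illustration, not a documented entry point.

import java.io.DataInputStream;
import java.io.FileInputStream;

import org.apache.flink.migration.runtime.checkpoint.savepoint.SavepointV0Serializer;
import org.apache.flink.runtime.checkpoint.savepoint.SavepointV1;

public class LegacySavepointReader {

    public static void main(String[] args) throws Exception {
        // Hypothetical path to the body of a version-0 (Flink 1.0) savepoint.
        String path = args[0];

        try (DataInputStream dis = new DataInputStream(new FileInputStream(path))) {
            // Assumes the caller has already consumed and validated the savepoint
            // magic number and version header that normally precede this body,
            // and that the serializer exposes a singleton INSTANCE.
            SavepointV1 converted = SavepointV0Serializer.INSTANCE.deserialize(
                    dis, LegacySavepointReader.class.getClassLoader());

            System.out.println("Converted savepoint for checkpoint " + converted.getCheckpointId());
        }
    }
}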