Use of org.apache.flink.migration.runtime.checkpoint.SubtaskState in project flink by apache.
The class SavepointV0Serializer, method determineOperatorChainLength:
private int determineOperatorChainLength(TaskState taskState, ClassLoader userClassLoader) throws IOException, ClassNotFoundException {
    Collection<SubtaskState> subtaskStates = taskState.getStates();
    if (subtaskStates == null || subtaskStates.isEmpty()) {
        return 0;
    }

    SubtaskState firstSubtaskState = subtaskStates.iterator().next();
    Object toCastTaskStateList = firstSubtaskState.getState().deserializeValue(userClassLoader);

    if (toCastTaskStateList instanceof StreamTaskStateList) {
        StreamTaskStateList taskStateList = (StreamTaskStateList) toCastTaskStateList;
        StreamTaskState[] streamTaskStates = taskStateList.getState(userClassLoader);
        return streamTaskStates.length;
    }
    return 0;
}
Use of org.apache.flink.migration.runtime.checkpoint.SubtaskState in project flink by apache.
The class SavepointV0Serializer, method convertSubtaskState:
private org.apache.flink.runtime.checkpoint.SubtaskState convertSubtaskState(
        SubtaskState subtaskState,
        int parallelInstanceIdx,
        ClassLoader userClassLoader,
        long checkpointID) throws Exception {

    SerializedValue<StateHandle<?>> serializedValue = subtaskState.getState();

    StreamTaskStateList stateList = (StreamTaskStateList) serializedValue.deserializeValue(userClassLoader);
    StreamTaskState[] streamTaskStates = stateList.getState(userClassLoader);

    List<StreamStateHandle> newChainStateList = Arrays.asList(new StreamStateHandle[streamTaskStates.length]);
    KeyGroupsStateHandle newKeyedState = null;

    for (int chainIdx = 0; chainIdx < streamTaskStates.length; ++chainIdx) {
        StreamTaskState streamTaskState = streamTaskStates[chainIdx];
        if (streamTaskState == null) {
            continue;
        }

        newChainStateList.set(chainIdx, convertOperatorAndFunctionState(streamTaskState));

        HashMap<String, KvStateSnapshot<?, ?, ?, ?>> oldKeyedState = streamTaskState.getKvStates();
        if (null != oldKeyedState) {
            Preconditions.checkState(null == newKeyedState, "Found more than one keyed state in chain");
            newKeyedState = convertKeyedBackendState(oldKeyedState, parallelInstanceIdx, checkpointID);
        }
    }

    ChainedStateHandle<StreamStateHandle> newChainedState = new ChainedStateHandle<>(newChainStateList);
    // nopChain is an all-null placeholder chain of the same length; it fills the managed and raw
    // operator state slots of the new SubtaskState, which the legacy format does not carry.
    ChainedStateHandle<OperatorStateHandle> nopChain =
        new ChainedStateHandle<>(Arrays.asList(new OperatorStateHandle[newChainedState.getLength()]));

    return new org.apache.flink.runtime.checkpoint.SubtaskState(newChainedState, nopChain, nopChain, newKeyedState, null);
}
Use of org.apache.flink.migration.runtime.checkpoint.SubtaskState in project flink by apache.
The class SavepointV0Serializer, method convertTaskState:
private org.apache.flink.runtime.checkpoint.TaskState convertTaskState(TaskState taskState, ClassLoader userClassLoader, long checkpointID) throws Exception {
    JobVertexID jobVertexID = taskState.getJobVertexID();
    int parallelism = taskState.getParallelism();
    int chainLength = determineOperatorChainLength(taskState, userClassLoader);

    org.apache.flink.runtime.checkpoint.TaskState newTaskState =
        new org.apache.flink.runtime.checkpoint.TaskState(jobVertexID, parallelism, parallelism, chainLength);

    if (chainLength > 0) {
        Map<Integer, SubtaskState> subtaskStates = taskState.getSubtaskStatesById();

        for (Map.Entry<Integer, SubtaskState> subtaskState : subtaskStates.entrySet()) {
            int parallelInstanceIdx = subtaskState.getKey();
            newTaskState.putState(parallelInstanceIdx, convertSubtaskState(subtaskState.getValue(), parallelInstanceIdx, userClassLoader, checkpointID));
        }
    }

    return newTaskState;
}
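The deserialize method shown further below ends by handing the rebuilt legacy task states to convertSavepoint, which this listing does not include. A minimal sketch of that glue step, assuming SavepointV1 exposes a (checkpointId, taskStates) constructor (an assumption, not taken from this listing), might look like:

// Hypothetical sketch of convertSavepoint; the SavepointV1 constructor used here is an assumption.
private SavepointV1 convertSavepoint(
        List<TaskState> oldTaskStates,
        ClassLoader userClassLoader,
        long checkpointID) throws Exception {

    List<org.apache.flink.runtime.checkpoint.TaskState> newTaskStates = new ArrayList<>(oldTaskStates.size());

    // Convert every legacy vertex state with convertTaskState above.
    for (TaskState oldTaskState : oldTaskStates) {
        newTaskStates.add(convertTaskState(oldTaskState, userClassLoader, checkpointID));
    }

    return new SavepointV1(checkpointID, newTaskStates);
}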
Use of org.apache.flink.migration.runtime.checkpoint.SubtaskState in project flink by apache.
The class SavepointV0Serializer, method serializeOld:
@VisibleForTesting
public void serializeOld(SavepointV0 savepoint, DataOutputStream dos) throws IOException {
    dos.writeLong(savepoint.getCheckpointId());

    Collection<org.apache.flink.migration.runtime.checkpoint.TaskState> taskStates = savepoint.getOldTaskStates();
    dos.writeInt(taskStates.size());

    for (org.apache.flink.migration.runtime.checkpoint.TaskState taskState : savepoint.getOldTaskStates()) {
        // Vertex ID
        dos.writeLong(taskState.getJobVertexID().getLowerPart());
        dos.writeLong(taskState.getJobVertexID().getUpperPart());

        // Parallelism
        int parallelism = taskState.getParallelism();
        dos.writeInt(parallelism);

        // Sub task states
        dos.writeInt(taskState.getNumberCollectedStates());

        for (int i = 0; i < parallelism; i++) {
            SubtaskState subtaskState = taskState.getState(i);

            if (subtaskState != null) {
                dos.writeInt(i);

                SerializedValue<?> serializedValue = subtaskState.getState();
                if (serializedValue == null) {
                    // null
                    dos.writeInt(-1);
                } else {
                    byte[] serialized = serializedValue.getByteArray();
                    dos.writeInt(serialized.length);
                    dos.write(serialized, 0, serialized.length);
                }

                dos.writeLong(subtaskState.getStateSize());
                dos.writeLong(subtaskState.getDuration());
            }
        }

        // Key group states
        dos.writeInt(taskState.getNumberCollectedKvStates());

        for (int i = 0; i < parallelism; i++) {
            KeyGroupState keyGroupState = taskState.getKvState(i);

            if (keyGroupState != null) {
                // Write the key group index as a full int so deserialize can read it back with readInt.
                dos.writeInt(i);

                SerializedValue<?> serializedValue = keyGroupState.getKeyGroupState();
                if (serializedValue == null) {
                    // null
                    dos.writeInt(-1);
                } else {
                    byte[] serialized = serializedValue.getByteArray();
                    dos.writeInt(serialized.length);
                    dos.write(serialized, 0, serialized.length);
                }

                dos.writeLong(keyGroupState.getStateSize());
                dos.writeLong(keyGroupState.getDuration());
            }
        }
    }
}
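Read off the code above, the legacy (V0) byte layout that serializeOld emits, and that deserialize below consumes, is roughly:

// checkpointId                                 : long
// number of task states                        : int
// per task state:
//   job vertex ID (lower, upper part)          : long, long
//   parallelism                                : int
//   number of collected subtask states         : int
//   per non-null subtask state:
//     subtask index                            : int
//     serialized value length (-1 if null),
//     followed by that many bytes              : int, byte[]
//     state size, duration                     : long, long
//   number of collected key group states       : int
//   per non-null key group state:
//     same fields as a subtask state, with the key group index in place of the subtask index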
Use of org.apache.flink.migration.runtime.checkpoint.SubtaskState in project flink by apache.
The class SavepointV0Serializer, method deserialize:
@Override
public SavepointV1 deserialize(DataInputStream dis, ClassLoader userClassLoader) throws IOException {
    long checkpointId = dis.readLong();

    // Task states
    int numTaskStates = dis.readInt();
    List<TaskState> taskStates = new ArrayList<>(numTaskStates);

    for (int i = 0; i < numTaskStates; i++) {
        JobVertexID jobVertexId = new JobVertexID(dis.readLong(), dis.readLong());
        int parallelism = dis.readInt();

        // Add task state
        TaskState taskState = new TaskState(jobVertexId, parallelism);
        taskStates.add(taskState);

        // Sub task states
        int numSubTaskStates = dis.readInt();
        for (int j = 0; j < numSubTaskStates; j++) {
            int subtaskIndex = dis.readInt();

            SerializedValue<StateHandle<?>> serializedValue = readSerializedValueStateHandle(dis);

            long stateSize = dis.readLong();
            long duration = dis.readLong();

            SubtaskState subtaskState = new SubtaskState(serializedValue, stateSize, duration);
            taskState.putState(subtaskIndex, subtaskState);
        }

        // Key group states
        int numKvStates = dis.readInt();
        for (int j = 0; j < numKvStates; j++) {
            int keyGroupIndex = dis.readInt();

            SerializedValue<StateHandle<?>> serializedValue = readSerializedValueStateHandle(dis);

            long stateSize = dis.readLong();
            long duration = dis.readLong();

            KeyGroupState keyGroupState = new KeyGroupState(serializedValue, stateSize, duration);
            taskState.putKvState(keyGroupIndex, keyGroupState);
        }
    }

    try {
        return convertSavepoint(taskStates, userClassLoader, checkpointId);
    } catch (Exception e) {
        throw new IOException(e);
    }
}
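As a rough usage illustration (not taken from the Flink sources), the sketch below round-trips a minimal legacy savepoint through serializeOld and deserialize. It assumes SavepointV0 offers a (checkpointId, taskStates) constructor and that the serializer is reachable through an INSTANCE field; treat both as assumptions.

// Hedged round-trip smoke test: serialize a minimal legacy savepoint and read it back.
// The SavepointV0 constructor and the INSTANCE singleton are assumptions, not taken from this listing.
SavepointV0 legacy = new SavepointV0(
        42L,
        Collections.<org.apache.flink.migration.runtime.checkpoint.TaskState>emptyList());

ByteArrayOutputStream buffer = new ByteArrayOutputStream();
SavepointV0Serializer.INSTANCE.serializeOld(legacy, new DataOutputStream(buffer));

SavepointV1 restored = SavepointV0Serializer.INSTANCE.deserialize(
        new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())),
        Thread.currentThread().getContextClassLoader());

// With no task states to convert, only the checkpoint id round-trips.
assert restored.getCheckpointId() == 42L;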