Use of org.apache.flink.runtime.util.TestByteStreamStateHandleDeepCompare in project flink by apache.
From the class CheckpointCoordinatorTest, method generateChainedPartitionableStateHandle.
private static ChainedStateHandle<OperatorStateHandle> generateChainedPartitionableStateHandle(
        Map<String, List<? extends Serializable>> states) throws IOException {

    List<List<? extends Serializable>> namedStateSerializables = new ArrayList<>(states.size());

    for (Map.Entry<String, List<? extends Serializable>> entry : states.entrySet()) {
        namedStateSerializables.add(entry.getValue());
    }

    Tuple2<byte[], List<long[]>> serializationWithOffsets = serializeTogetherAndTrackOffsets(namedStateSerializables);

    Map<String, OperatorStateHandle.StateMetaInfo> offsetsMap = new HashMap<>(states.size());

    int idx = 0;
    for (Map.Entry<String, List<? extends Serializable>> entry : states.entrySet()) {
        offsetsMap.put(
                entry.getKey(),
                new OperatorStateHandle.StateMetaInfo(
                        serializationWithOffsets.f1.get(idx),
                        OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
        ++idx;
    }

    ByteStreamStateHandle streamStateHandle = new TestByteStreamStateHandleDeepCompare(
            String.valueOf(UUID.randomUUID()), serializationWithOffsets.f0);

    OperatorStateHandle operatorStateHandle = new OperatorStateHandle(offsetsMap, streamStateHandle);

    return ChainedStateHandle.wrapSingleHandle(operatorStateHandle);
}
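The helper serializes all map values into one byte array and records, per state name, the offsets of its entries in that array. As a minimal sketch of how it could be exercised (not part of the Flink test; the test name, state names, values, and assertions are assumptions for illustration):

@Test
public void testGenerateChainedPartitionableStateHandle() throws Exception {
    // Hypothetical state names and values.
    Map<String, List<? extends Serializable>> states = new HashMap<>();
    states.put("bufferState", Arrays.asList(1, 2, 3));
    states.put("timerState", Arrays.asList("a", "b"));

    ChainedStateHandle<OperatorStateHandle> chained = generateChainedPartitionableStateHandle(states);

    // The helper wraps exactly one OperatorStateHandle, with one StateMetaInfo entry per state name.
    Assert.assertEquals(1, chained.getLength());
    Assert.assertEquals(states.keySet(), chained.get(0).getStateNameToPartitionOffsets().keySet());
}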
Use of org.apache.flink.runtime.util.TestByteStreamStateHandleDeepCompare in project flink by apache.
From the class SavepointV1Test, method createTaskStates.
static Collection<TaskState> createTaskStates(int numTaskStates, int numSubtasksPerTask) throws IOException {

    Random random = new Random(numTaskStates * 31 + numSubtasksPerTask);

    List<TaskState> taskStates = new ArrayList<>(numTaskStates);

    for (int stateIdx = 0; stateIdx < numTaskStates; ++stateIdx) {

        int chainLength = 1 + random.nextInt(8);

        TaskState taskState = new TaskState(new JobVertexID(), numSubtasksPerTask, 128, chainLength);

        int noNonPartitionableStateAtIndex = random.nextInt(chainLength);
        int noOperatorStateBackendAtIndex = random.nextInt(chainLength);
        int noOperatorStateStreamAtIndex = random.nextInt(chainLength);

        boolean hasKeyedBackend = random.nextInt(4) != 0;
        boolean hasKeyedStream = random.nextInt(4) != 0;

        for (int subtaskIdx = 0; subtaskIdx < numSubtasksPerTask; subtaskIdx++) {

            List<StreamStateHandle> nonPartitionableStates = new ArrayList<>(chainLength);
            List<OperatorStateHandle> operatorStatesBackend = new ArrayList<>(chainLength);
            List<OperatorStateHandle> operatorStatesStream = new ArrayList<>(chainLength);

            for (int chainIdx = 0; chainIdx < chainLength; ++chainIdx) {

                StreamStateHandle nonPartitionableState = new TestByteStreamStateHandleDeepCompare(
                        "a-" + chainIdx, ("Hi-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));
                StreamStateHandle operatorStateBackend = new TestByteStreamStateHandleDeepCompare(
                        "b-" + chainIdx, ("Beautiful-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));
                StreamStateHandle operatorStateStream = new TestByteStreamStateHandleDeepCompare(
                        "b-" + chainIdx, ("Beautiful-" + chainIdx).getBytes(ConfigConstants.DEFAULT_CHARSET));

                Map<String, OperatorStateHandle.StateMetaInfo> offsetsMap = new HashMap<>();
                offsetsMap.put("A", new OperatorStateHandle.StateMetaInfo(new long[]{0, 10, 20}, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
                offsetsMap.put("B", new OperatorStateHandle.StateMetaInfo(new long[]{30, 40, 50}, OperatorStateHandle.Mode.SPLIT_DISTRIBUTE));
                offsetsMap.put("C", new OperatorStateHandle.StateMetaInfo(new long[]{60, 70, 80}, OperatorStateHandle.Mode.BROADCAST));

                if (chainIdx != noNonPartitionableStateAtIndex) {
                    nonPartitionableStates.add(nonPartitionableState);
                }

                if (chainIdx != noOperatorStateBackendAtIndex) {
                    OperatorStateHandle operatorStateHandleBackend = new OperatorStateHandle(offsetsMap, operatorStateBackend);
                    operatorStatesBackend.add(operatorStateHandleBackend);
                }

                if (chainIdx != noOperatorStateStreamAtIndex) {
                    OperatorStateHandle operatorStateHandleStream = new OperatorStateHandle(offsetsMap, operatorStateStream);
                    operatorStatesStream.add(operatorStateHandleStream);
                }
            }

            KeyGroupsStateHandle keyedStateBackend = null;
            KeyGroupsStateHandle keyedStateStream = null;

            if (hasKeyedBackend) {
                keyedStateBackend = new KeyGroupsStateHandle(
                        new KeyGroupRangeOffsets(1, 1, new long[]{42}),
                        new TestByteStreamStateHandleDeepCompare("c", "Hello".getBytes(ConfigConstants.DEFAULT_CHARSET)));
            }

            if (hasKeyedStream) {
                keyedStateStream = new KeyGroupsStateHandle(
                        new KeyGroupRangeOffsets(1, 1, new long[]{23}),
                        new TestByteStreamStateHandleDeepCompare("d", "World".getBytes(ConfigConstants.DEFAULT_CHARSET)));
            }

            taskState.putState(subtaskIdx, new SubtaskState(
                    new ChainedStateHandle<>(nonPartitionableStates),
                    new ChainedStateHandle<>(operatorStatesBackend),
                    new ChainedStateHandle<>(operatorStatesStream),
                    keyedStateStream,
                    keyedStateBackend));
        }

        taskStates.add(taskState);
    }

    return taskStates;
}
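createTaskStates builds a randomized but reproducible hierarchy of task states, subtasks, and operator chains, with some chain positions deliberately left without a given state type. A hedged sketch of how it might be consumed (the checkpoint id and counts below are illustrative assumptions, not taken from SavepointV1Test):

// Hypothetical numbers: 4 task states, 16 subtasks each.
Collection<TaskState> taskStates = createTaskStates(4, 16);

// SavepointV1 simply pairs a checkpoint id with the generated task states.
SavepointV1 savepoint = new SavepointV1(123L, taskStates);

Assert.assertEquals(123L, savepoint.getCheckpointId());
Assert.assertEquals(4, savepoint.getTaskStates().size());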
Use of org.apache.flink.runtime.util.TestByteStreamStateHandleDeepCompare in project flink by apache.
From the class CheckpointCoordinatorTest, method generateKeyGroupState.
public static KeyGroupsStateHandle generateKeyGroupState(
        KeyGroupRange keyGroupRange, List<? extends Serializable> states) throws IOException {

    Preconditions.checkArgument(keyGroupRange.getNumberOfKeyGroups() == states.size());

    Tuple2<byte[], List<long[]>> serializedDataWithOffsets =
            serializeTogetherAndTrackOffsets(Collections.<List<? extends Serializable>>singletonList(states));

    KeyGroupRangeOffsets keyGroupRangeOffsets =
            new KeyGroupRangeOffsets(keyGroupRange, serializedDataWithOffsets.f1.get(0));

    ByteStreamStateHandle allSerializedStatesHandle = new TestByteStreamStateHandleDeepCompare(
            String.valueOf(UUID.randomUUID()), serializedDataWithOffsets.f0);

    return new KeyGroupsStateHandle(keyGroupRangeOffsets, allSerializedStatesHandle);
}
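The Preconditions check means exactly one state object must be supplied per key group in the range. A minimal illustrative call, assuming a three key-group range and made-up values:

// Hypothetical values for illustration only.
KeyGroupRange range = KeyGroupRange.of(0, 2); // key groups 0, 1, 2
List<Integer> perKeyGroupState = Arrays.asList(10, 20, 30); // one entry per key group

KeyGroupsStateHandle handle = generateKeyGroupState(range, perKeyGroupState);

// The resulting handle covers exactly the requested key-group range.
Assert.assertEquals(range, handle.getKeyGroupRange());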