Use of org.apache.flink.core.memory.DataInputDeserializer in project flink by apache.
The class CopyOnWriteSkipListStateMapSnapshot, method getIterator.
@Override
public Iterator<StateEntry<K, N, S>> getIterator(
        @Nonnull TypeSerializer<K> keySerializer,
        @Nonnull TypeSerializer<N> namespaceSerializer,
        @Nonnull TypeSerializer<S> stateSerializer,
        @Nullable StateSnapshotTransformer<S> stateSnapshotTransformer) {
    SkipListValueSerializer<S> skipListValueSerializer = new SkipListValueSerializer<>(stateSerializer);
    DataInputDeserializer inputDeserializer = new DataInputDeserializer();
    // 1. iterates nodes to get size after transform
    Iterator<Tuple2<Long, Long>> transformNodeIterator = new SnapshotNodeIterator(true);
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(transformNodeIterator, 0), false)
            .map(tuple -> transformEntry(
                    keySerializer, namespaceSerializer, stateSnapshotTransformer,
                    skipListValueSerializer, inputDeserializer, tuple))
            .filter(Objects::nonNull)
            .iterator();
}
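The method above creates one DataInputDeserializer and reuses it for every node handed to transformEntry. A minimal sketch of that reuse pattern follows; the helper name readAll and the parameter valueBytesList are illustrative, not part of the Flink sources.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;

class ReuseSketch {
    // Reuse a single DataInputDeserializer across many serialized values by
    // re-pointing it at each value's bytes before deserializing.
    static <S> List<S> readAll(TypeSerializer<S> serializer, List<byte[]> valueBytesList)
            throws IOException {
        DataInputDeserializer in = new DataInputDeserializer();
        List<S> values = new ArrayList<>(valueBytesList.size());
        for (byte[] bytes : valueBytesList) {
            in.setBuffer(bytes);
            values.add(serializer.deserialize(in));
        }
        return values;
    }
}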
Use of org.apache.flink.core.memory.DataInputDeserializer in project flink by apache.
The class RawType, method restore.
// --------------------------------------------------------------------------------------------

/**
 * Restores a raw type from the components of a serialized string representation.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public static RawType<?> restore(ClassLoader classLoader, String className, String serializerString) {
    try {
        final Class<?> clazz = Class.forName(className, true, classLoader);
        final byte[] bytes = EncodingUtils.decodeBase64ToBytes(serializerString);
        final DataInputDeserializer inputDeserializer = new DataInputDeserializer(bytes);
        final TypeSerializerSnapshot<?> snapshot =
                TypeSerializerSnapshot.readVersionedSnapshot(inputDeserializer, classLoader);
        return (RawType<?>) new RawType(clazz, snapshot.restoreSerializer());
    } catch (Throwable t) {
        throw new ValidationException(
                String.format(
                        "Unable to restore the RAW type of class '%s' with serializer snapshot '%s'.",
                        className, serializerString),
                t);
    }
}
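restore reads back what a matching write path produced: Base64-decode the string, wrap the bytes in a DataInputDeserializer, and read a versioned TypeSerializerSnapshot. A hedged sketch of the opposite direction is below; it is not the actual RawType code, and java.util.Base64 stands in for the project's own encoding helper.

import java.io.IOException;
import java.util.Base64;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataOutputSerializer;

class SnapshotStringSketch {
    // Snapshot a serializer, write the snapshot in versioned form, and Base64-encode the bytes
    // so they can be embedded in a string representation.
    static String toSerializerString(TypeSerializer<?> serializer) throws IOException {
        TypeSerializerSnapshot<?> snapshot = serializer.snapshotConfiguration();
        DataOutputSerializer out = new DataOutputSerializer(128);
        TypeSerializerSnapshot.writeVersionedSnapshot(out, snapshot);
        return Base64.getEncoder().encodeToString(out.getCopyOfBuffer());
    }
}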
Use of org.apache.flink.core.memory.DataInputDeserializer in project flink by apache.
The class SerializedCompositeKeyBuilderTest, method testSetKeyInternal.
private <K> void testSetKeyInternal(TypeSerializer<K> serializer, Collection<K> testKeys, int maxParallelism)
        throws IOException {
    final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;
    SerializedCompositeKeyBuilder<K> keyBuilder =
            createRocksDBSerializedCompositeKeyBuilder(serializer, prefixBytes);
    final DataInputDeserializer deserializer = new DataInputDeserializer();
    for (K testKey : testKeys) {
        int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
        byte[] result = dataOutputSerializer.getCopyOfBuffer();
        deserializer.setBuffer(result);
        assertKeyKeyGroupBytes(testKey, keyGroup, prefixBytes, serializer, deserializer, false);
        Assert.assertEquals(0, deserializer.available());
    }
}
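The assertion helpers themselves are not shown in this snippet. As a hedged sketch of what a check like assertKeyKeyGroupBytes roughly verifies, assuming the key group is stored big-endian in the first prefixBytes bytes and is followed by the serialized key (the helper name below is illustrative):

import java.io.IOException;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;

class KeyGroupPrefixSketch {
    // Read back the key-group prefix (big-endian, prefixBytes bytes) followed by the key.
    static <K> void checkKeyGroupAndKey(
            DataInputDeserializer in,
            int prefixBytes,
            int expectedKeyGroup,
            K expectedKey,
            TypeSerializer<K> keySerializer)
            throws IOException {
        int keyGroup = 0;
        for (int i = 0; i < prefixBytes; i++) {
            keyGroup = (keyGroup << 8) | (in.readByte() & 0xFF);
        }
        if (keyGroup != expectedKeyGroup || !expectedKey.equals(keySerializer.deserialize(in))) {
            throw new AssertionError("unexpected key group or key");
        }
    }
}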
Use of org.apache.flink.core.memory.DataInputDeserializer in project flink by apache.
The class SerializedCompositeKeyBuilderTest, method testSetKeyNamespaceInternal.
private <K, N> void testSetKeyNamespaceInternal(
        TypeSerializer<K> keySerializer,
        TypeSerializer<N> namespaceSerializer,
        Collection<K> testKeys,
        Collection<N> testNamespaces,
        int maxParallelism,
        BuildKeyAndNamespaceType buildKeyAndNamespaceType)
        throws IOException {
    final int prefixBytes = maxParallelism > Byte.MAX_VALUE ? 2 : 1;
    SerializedCompositeKeyBuilder<K> keyBuilder =
            createRocksDBSerializedCompositeKeyBuilder(keySerializer, prefixBytes);
    final DataInputDeserializer deserializer = new DataInputDeserializer();
    final boolean ambiguousPossible = keyBuilder.isAmbiguousCompositeKeyPossible(namespaceSerializer);
    for (K testKey : testKeys) {
        int keyGroup = setKeyAndReturnKeyGroup(keyBuilder, testKey, maxParallelism);
        for (N testNamespace : testNamespaces) {
            final byte[] compositeBytes;
            if (buildKeyAndNamespaceType == BuildKeyAndNamespaceType.BUILD) {
                compositeBytes = keyBuilder.buildCompositeKeyNamespace(testNamespace, namespaceSerializer);
            } else {
                keyBuilder.setNamespace(testNamespace, namespaceSerializer);
                compositeBytes = keyBuilder.build();
            }
            deserializer.setBuffer(compositeBytes);
            assertKeyGroupKeyNamespaceBytes(
                    testKey, keyGroup, prefixBytes, keySerializer,
                    testNamespace, namespaceSerializer, deserializer, ambiguousPossible);
            Assert.assertEquals(0, deserializer.available());
        }
    }
}
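For orientation, a hedged sketch of the composite layout these assertions verify: key-group prefix, then the serialized key, then the serialized namespace. The helper below is illustrative only and ignores the extra length information used when ambiguous composite keys are possible.

import java.io.IOException;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputDeserializer;

class CompositeKeyLayoutSketch {
    // Read back [key-group prefix | serialized key | serialized namespace].
    static <K, N> void readComposite(
            byte[] compositeBytes,
            int prefixBytes,
            TypeSerializer<K> keySerializer,
            TypeSerializer<N> namespaceSerializer)
            throws IOException {
        DataInputDeserializer in = new DataInputDeserializer(compositeBytes);
        int keyGroup = 0;
        for (int i = 0; i < prefixBytes; i++) {
            keyGroup = (keyGroup << 8) | (in.readByte() & 0xFF);
        }
        K key = keySerializer.deserialize(in);
        N namespace = namespaceSerializer.deserialize(in);
        System.out.println("keyGroup=" + keyGroup + ", key=" + key + ", namespace=" + namespace);
        // nothing should remain in a well-formed, non-ambiguous composite key
        if (in.available() != 0) {
            throw new AssertionError("unexpected trailing bytes");
        }
    }
}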
Use of org.apache.flink.core.memory.DataInputDeserializer in project flink by apache.
The class CommittableMessageSerializer, method deserialize.
@Override
public CommittableMessage<CommT> deserialize(int version, byte[] serialized) throws IOException {
    DataInputDeserializer in = new DataInputDeserializer(serialized);
    byte messageType = in.readByte();
    switch (messageType) {
        case COMMITTABLE:
            return new CommittableWithLineage<>(
                    SimpleVersionedSerialization.readVersionAndDeSerialize(committableSerializer, in),
                    readCheckpointId(in),
                    in.readInt());
        case SUMMARY:
            return new CommittableSummary<>(
                    in.readInt(), in.readInt(), readCheckpointId(in), in.readInt(), in.readInt(), in.readInt());
        default:
            throw new IllegalStateException(
                    "Unexpected message type " + messageType + " in " + StringUtils.byteToHexString(serialized));
    }
}
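deserialize reads a one-byte message tag and then the fields belonging to that message type. A hedged sketch of the matching tag-byte framing follows; the constant value and the payload fields are illustrative, not the actual CommittableMessageSerializer.serialize.

import java.io.IOException;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

class TagFramingSketch {
    // Illustrative tag value; the real serializer defines its own constants.
    static final byte SUMMARY = 2;

    // Write a one-byte message tag followed by a (simplified) payload.
    static byte[] writeSummary(int subtaskId, int numberOfSubtasks) throws IOException {
        DataOutputSerializer out = new DataOutputSerializer(16);
        out.writeByte(SUMMARY);
        out.writeInt(subtaskId);
        out.writeInt(numberOfSubtasks);
        return out.getCopyOfBuffer();
    }

    // Read the tag back and dispatch on it, mirroring the switch in deserialize above.
    static void readBack(byte[] serialized) throws IOException {
        DataInputDeserializer in = new DataInputDeserializer(serialized);
        byte tag = in.readByte();
        if (tag == SUMMARY) {
            System.out.println("subtask=" + in.readInt() + ", numberOfSubtasks=" + in.readInt());
        }
    }
}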