Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

The class TestManagedFileSourceSplitSerializer, method serialize.

@Override
public byte[] serialize(TestManagedIterableSourceSplit split) throws IOException {
    final DataOutputSerializer out = new DataOutputSerializer(64);
    out.writeUTF(split.splitId());
    split.getFilePath().write(out);
    final byte[] result = out.getCopyOfBuffer();
    out.clear();
    return result;
}
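
The matching read path uses DataInputDeserializer from the same package. A minimal sketch of the inverse method, assuming TestManagedIterableSourceSplit can be rebuilt from a split id and a Path (that constructor is an assumption, not taken from the Flink sources):

@Override
public TestManagedIterableSourceSplit deserialize(int version, byte[] serialized) throws IOException {
    final DataInputDeserializer in = new DataInputDeserializer(serialized);
    final String splitId = in.readUTF();
    final Path filePath = new Path();
    // Path implements IOReadableWritable, so it reads itself back from the view
    filePath.read(in);
    // hypothetical constructor, mirroring the fields written in serialize
    return new TestManagedIterableSourceSplit(splitId, filePath);
}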
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

The class RawType, method getSerializerString.

/**
 * Returns the serialized {@link TypeSerializerSnapshot} in Base64 encoding of this raw type.
 */
public String getSerializerString() {
    if (serializerString == null) {
        final DataOutputSerializer outputSerializer = new DataOutputSerializer(128);
        try {
            TypeSerializerSnapshot.writeVersionedSnapshot(
                    outputSerializer, serializer.snapshotConfiguration());
            serializerString =
                    EncodingUtils.encodeBytesToBase64(outputSerializer.getCopyOfBuffer());
            return serializerString;
        } catch (Exception e) {
            throw new TableException(
                    String.format(
                            "Unable to generate a string representation of the serializer snapshot of '%s' "
                                    + "describing the class '%s' for the RAW type.",
                            serializer.getClass().getName(), clazz.toString()),
                    e);
        }
    }
    return serializerString;
}
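
The string can be turned back into a snapshot with DataInputDeserializer and TypeSerializerSnapshot.readVersionedSnapshot. A hedged sketch; the helper name is illustrative, and it assumes EncodingUtils offers the matching decodeBase64ToBytes:

private static TypeSerializerSnapshot<?> restoreSnapshot(String base64, ClassLoader classLoader)
        throws IOException {
    final byte[] bytes = EncodingUtils.decodeBase64ToBytes(base64);
    final DataInputDeserializer in = new DataInputDeserializer(bytes);
    // reads the version header written by writeVersionedSnapshot and
    // instantiates the snapshot class recorded in the stream
    return TypeSerializerSnapshot.readVersionedSnapshot(in, classLoader);
}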
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

The class SourceCoordinatorTest, method createCheckpointDataWithSerdeV0.

// ------------------------------------------------------------------------
// test helpers
// ------------------------------------------------------------------------
private byte[] createCheckpointDataWithSerdeV0(Set<MockSourceSplit> splits) throws Exception {
    final MockSplitEnumeratorCheckpointSerializer enumChkptSerializer =
            new MockSplitEnumeratorCheckpointSerializer();
    final DataOutputSerializer serializer = new DataOutputSerializer(32);
    serializer.writeInt(SourceCoordinatorSerdeUtils.VERSION_0);
    serializer.writeInt(enumChkptSerializer.getVersion());
    final byte[] serializedEnumChkpt = enumChkptSerializer.serialize(splits);
    serializer.writeInt(serializedEnumChkpt.length);
    serializer.write(serializedEnumChkpt);
    // Version 0 wrote the number of readers, see FLINK-21452
    serializer.writeInt(0);
    // Version 0 also wrote the split assignment tracker:
    // SplitSerializer version used in the assignment tracker
    serializer.writeInt(0);
    // number of checkpoints in the assignment tracker
    serializer.writeInt(0);
    return serializer.getCopyOfBuffer();
}
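
The layout written above reads back symmetrically; a minimal sketch of the V0 decoding order using DataInputDeserializer (the helper name is illustrative):

private static byte[] readEnumeratorCheckpointV0(byte[] checkpointData) throws IOException {
    final DataInputDeserializer in = new DataInputDeserializer(checkpointData);
    final int coordinatorSerdeVersion = in.readInt(); // SourceCoordinatorSerdeUtils.VERSION_0
    final int enumSerializerVersion = in.readInt();
    final byte[] enumCheckpoint = new byte[in.readInt()]; // length prefix
    in.readFully(enumCheckpoint);
    // the three trailing ints (reader count, split serializer version,
    // checkpoint count) exist only in the V0 format
    return enumCheckpoint;
}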
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

The class RocksDBRocksStateKeysAndNamespacesIteratorTest, method testIteratorHelper.

@SuppressWarnings("unchecked")
<K> void testIteratorHelper(
        TypeSerializer<K> keySerializer, int maxKeyGroupNumber, Function<Integer, K> getKeyFunc)
        throws Exception {
    String testStateName = "aha";
    String namespace = "ns";
    try (RocksDBKeyedStateBackendTestFactory factory = new RocksDBKeyedStateBackendTestFactory()) {
        RocksDBKeyedStateBackend<K> keyedStateBackend =
                factory.create(tmp, keySerializer, maxKeyGroupNumber);
        ValueState<String> testState =
                keyedStateBackend.getPartitionedState(
                        namespace,
                        StringSerializer.INSTANCE,
                        new ValueStateDescriptor<>(testStateName, String.class));
        // insert records
        for (int i = 0; i < 1000; ++i) {
            keyedStateBackend.setCurrentKey(getKeyFunc.apply(i));
            testState.update(String.valueOf(i));
        }
        DataOutputSerializer outputStream = new DataOutputSerializer(8);
        boolean ambiguousKeyPossible =
                CompositeKeySerializationUtils.isAmbiguousKeyPossible(
                        keySerializer, StringSerializer.INSTANCE);
        CompositeKeySerializationUtils.writeNameSpace(
                namespace, StringSerializer.INSTANCE, outputStream, ambiguousKeyPossible);
        // the column family was already created with the state; it is closed with the backend
        ColumnFamilyHandle handle = keyedStateBackend.getColumnFamilyHandle(testStateName);
        try (RocksIteratorWrapper iterator =
                        RocksDBOperationUtils.getRocksIterator(
                                keyedStateBackend.db, handle, keyedStateBackend.getReadOptions());
                RocksStateKeysAndNamespaceIterator<K, String> iteratorWrapper =
                        new RocksStateKeysAndNamespaceIterator<>(
                                iterator,
                                testStateName,
                                keySerializer,
                                StringSerializer.INSTANCE,
                                keyedStateBackend.getKeyGroupPrefixBytes(),
                                ambiguousKeyPossible)) {
            iterator.seekToFirst();
            // collect the valid records
            List<Tuple2<Integer, String>> fetchedKeys = new ArrayList<>(1000);
            while (iteratorWrapper.hasNext()) {
                Tuple2 entry = iteratorWrapper.next();
                entry.f0 = Integer.parseInt(entry.f0.toString());
                fetchedKeys.add((Tuple2<Integer, String>) entry);
            }
            fetchedKeys.sort(Comparator.comparingInt(a -> a.f0));
            Assert.assertEquals(1000, fetchedKeys.size());
            for (int i = 0; i < 1000; ++i) {
                Assert.assertEquals(i, fetchedKeys.get(i).f0.intValue());
                Assert.assertEquals(namespace, fetchedKeys.get(i).f1);
            }
        }
    }
}
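
Both iterator tests depend on RocksDB's composite key layout: key-group prefix, then key, then namespace. A hedged fragment showing how a complete composite key would be written with the same utilities, given a key group, the backend's prefix byte count, and the test's key and serializer variables in scope (the writeKeyGroup/writeKey signatures are recalled from org.apache.flink.runtime.state.CompositeKeySerializationUtils and may differ between Flink versions):

DataOutputSerializer out = new DataOutputSerializer(32);
// 1 or 2 prefix bytes, depending on the backend's maximum parallelism
CompositeKeySerializationUtils.writeKeyGroup(keyGroup, keyGroupPrefixBytes, out);
// the key itself, length-prefixed when key/namespace boundaries would be ambiguous
CompositeKeySerializationUtils.writeKey(key, keySerializer, out, ambiguousKeyPossible);
// the namespace, exactly as in the tests above
CompositeKeySerializationUtils.writeNameSpace(
        namespace, StringSerializer.INSTANCE, out, ambiguousKeyPossible);
byte[] compositeKey = out.getCopyOfBuffer();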
Use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

The class RocksDBRocksStateKeysIteratorTest, method testIteratorHelper.

<K> void testIteratorHelper(
        TypeSerializer<K> keySerializer, int maxKeyGroupNumber, Function<Integer, K> getKeyFunc)
        throws Exception {
    String testStateName = "aha";
    String namespace = "ns";
    try (RocksDBKeyedStateBackendTestFactory factory = new RocksDBKeyedStateBackendTestFactory()) {
        RocksDBKeyedStateBackend<K> keyedStateBackend =
                factory.create(tmp, keySerializer, maxKeyGroupNumber);
        ValueState<String> testState =
                keyedStateBackend.getPartitionedState(
                        namespace,
                        StringSerializer.INSTANCE,
                        new ValueStateDescriptor<>(testStateName, String.class));
        // insert records
        for (int i = 0; i < 1000; ++i) {
            keyedStateBackend.setCurrentKey(getKeyFunc.apply(i));
            testState.update(String.valueOf(i));
        }
        DataOutputSerializer outputStream = new DataOutputSerializer(8);
        boolean ambiguousKeyPossible =
                CompositeKeySerializationUtils.isAmbiguousKeyPossible(
                        keySerializer, StringSerializer.INSTANCE);
        CompositeKeySerializationUtils.writeNameSpace(
                namespace, StringSerializer.INSTANCE, outputStream, ambiguousKeyPossible);
        byte[] nameSpaceBytes = outputStream.getCopyOfBuffer();
        // the column family was already created with the state; it is closed with the backend
        ColumnFamilyHandle handle = keyedStateBackend.getColumnFamilyHandle(testStateName);
        try (RocksIteratorWrapper iterator =
                        RocksDBOperationUtils.getRocksIterator(
                                keyedStateBackend.db, handle, keyedStateBackend.getReadOptions());
                RocksStateKeysIterator<K> iteratorWrapper =
                        new RocksStateKeysIterator<>(
                                iterator,
                                testStateName,
                                keySerializer,
                                keyedStateBackend.getKeyGroupPrefixBytes(),
                                ambiguousKeyPossible,
                                nameSpaceBytes)) {
            iterator.seekToFirst();
            // collect the valid records
            List<Integer> fetchedKeys = new ArrayList<>(1000);
            while (iteratorWrapper.hasNext()) {
                fetchedKeys.add(Integer.parseInt(iteratorWrapper.next().toString()));
            }
            fetchedKeys.sort(Comparator.comparingInt(a -> a));
            Assert.assertEquals(1000, fetchedKeys.size());
            for (int i = 0; i < 1000; ++i) {
                Assert.assertEquals(i, fetchedKeys.get(i).intValue());
            }
        }
    }
}
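
Stepping back from the excerpts, DataOutputSerializer and DataInputDeserializer form a simple in-memory round trip: the constructor argument is only a starting size and the buffer grows on demand, getCopyOfBuffer() copies exactly the bytes written so far, and clear() resets the write position for reuse. A minimal self-contained sketch:

import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;

public class RoundTripDemo {
    public static void main(String[] args) throws Exception {
        // 4 bytes is deliberately too small; the buffer grows automatically
        DataOutputSerializer out = new DataOutputSerializer(4);
        out.writeUTF("hello");
        out.writeInt(42);
        byte[] bytes = out.getCopyOfBuffer(); // copies out.length() bytes, not the raw buffer
        out.clear(); // the instance can now be reused for the next record

        DataInputDeserializer in = new DataInputDeserializer(bytes);
        System.out.println(in.readUTF()); // prints: hello
        System.out.println(in.readInt()); // prints: 42
    }
}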