Usage example of org.apache.flink.core.memory.DataOutputSerializer in the Apache Flink project.
From the class StreamElementSerializerTest, method serializeAndDeserialize:
@SuppressWarnings("unchecked")
private static <T, X extends StreamElement> X serializeAndDeserialize(X record, StreamElementSerializer<T> serializer) throws IOException {
    // Round-trip the record: serialize it, push the bytes through the
    // serializer's binary copy() path, then deserialize the copied bytes.
    final DataOutputSerializer firstPass = new DataOutputSerializer(32);
    serializer.serialize(record, firstPass);

    // Binary copy step: transfer the serialized form to a second buffer
    // without materializing the record, exercising serializer.copy().
    final DataInputDeserializer copySource =
            new DataInputDeserializer(firstPass.getByteArray(), 0, firstPass.length());
    final DataOutputSerializer copyTarget = new DataOutputSerializer(32);
    serializer.copy(copySource, copyTarget);

    // Deserialize from the copied bytes. The unchecked cast is safe as long as
    // the serializer round-trips the concrete StreamElement subtype.
    final DataInputDeserializer finalSource =
            new DataInputDeserializer(copyTarget.getByteArray(), 0, copyTarget.length());
    return (X) serializer.deserialize(finalSource);
}
Usage example of org.apache.flink.core.memory.DataOutputSerializer in the Apache Flink project.
From the class SpanningWrapper, method copyDataBuffer:
// Snapshots the unconsumed spanning-record state into a fresh unpooled memory
// segment, laid out as: 4-byte record length prefix, the record bytes
// accumulated so far, then any left-over bytes not yet consumed.
// NOTE(review): relies on instance fields (leftOverLimit, leftOverStart,
// accumulatedRecordBytes, recordLength, buffer, leftOverData) maintained
// elsewhere in SpanningWrapper — presumably kept consistent by the caller.
private MemorySegment copyDataBuffer() throws IOException {
// Number of left-over bytes between the start and limit markers.
int leftOverSize = leftOverLimit - leftOverStart;
// Total bytes to snapshot: length prefix + accumulated record bytes + left-overs.
// LENGTH_BYTES is assumed to be 4, matching the writeInt below — TODO confirm.
int unconsumedSize = LENGTH_BYTES + accumulatedRecordBytes + leftOverSize;
DataOutputSerializer serializer = new DataOutputSerializer(unconsumedSize);
serializer.writeInt(recordLength);
serializer.write(buffer, 0, accumulatedRecordBytes);
if (leftOverData != null) {
// NOTE(review): if leftOverData is null, leftOverSize is presumably 0 so the
// segment is still filled completely — verify against the field invariants.
serializer.write(leftOverData, leftOverStart, leftOverSize);
}
MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(unconsumedSize);
// Copy exactly segment.size() bytes; the serializer's shared buffer may be
// larger than the written content, so the explicit length bound matters.
segment.put(0, serializer.getSharedBuffer(), 0, segment.size());
return segment;
}
Usage example of org.apache.flink.core.memory.DataOutputSerializer in the Apache Flink project.
From the class SpanningRecordSerializationTest, method testSerializationRoundTrip:
/**
 * Iterates over the provided records and tests whether {@link RecordWriter#serializeRecord} and
 * {@link RecordDeserializer} interact as expected.
 *
 * <p>Only a single {@link MemorySegment} will be allocated.
 *
 * @param records records to test
 * @param segmentSize size for the {@link MemorySegment}
 * @param deserializer deserializer fed with the buffers produced during serialization
 */
private static void testSerializationRoundTrip(Iterable<SerializationTestType> records, int segmentSize, RecordDeserializer<SerializationTestType> deserializer) throws Exception {
    final DataOutputSerializer serializer = new DataOutputSerializer(128);
    final ArrayDeque<SerializationTestType> serializedRecords = new ArrayDeque<>();
    // -------------------------------------------------------------------------------------------------------------
    BufferAndSerializerResult serializationResult = setNextBufferForSerializer(serializer.wrapAsByteBuffer(), segmentSize);
    int numRecords = 0;
    for (SerializationTestType record : records) {
        serializedRecords.add(record);
        numRecords++;
        // serialize record
        serializer.clear();
        ByteBuffer serializedRecord = RecordWriter.serializeRecord(serializer, record);
        serializationResult.getBufferBuilder().appendAndCommit(serializedRecord);
        if (serializationResult.getBufferBuilder().isFull()) {
            // buffer is full => start deserializing
            deserializer.setNextBuffer(serializationResult.buildBuffer());
            numRecords -= DeserializationUtils.deserializeRecords(serializedRecords, deserializer);
            // move buffers as long as necessary (for long records spanning several segments)
            while ((serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize)).isFullBuffer()) {
                deserializer.setNextBuffer(serializationResult.buildBuffer());
            }
        }
        // each record must be fully appended before moving on to the next one
        Assert.assertFalse(serializedRecord.hasRemaining());
    }
    // deserialize left over records
    deserializer.setNextBuffer(serializationResult.buildBuffer());
    while (!serializedRecords.isEmpty()) {
        SerializationTestType expected = serializedRecords.poll();
        // Class.newInstance() is deprecated (Java 9+) and propagates checked
        // exceptions unchecked; use the explicit no-arg constructor instead.
        SerializationTestType actual = expected.getClass().getDeclaredConstructor().newInstance();
        RecordDeserializer.DeserializationResult result = deserializer.getNextRecord(actual);
        Assert.assertTrue(result.isFullRecord());
        Assert.assertEquals(expected, actual);
        numRecords--;
    }
    // assert that all records have been serialized and deserialized
    Assert.assertEquals(0, numRecords);
}
Usage example of org.apache.flink.core.memory.DataOutputSerializer in the Apache Flink project.
From the class SpanningRecordSerializationTest, method testUnconsumedBuffer:
/**
 * Serializes a single record, feeds the resulting buffers to the given deserializer, and after
 * every buffer verifies that {@link RecordDeserializer#getUnconsumedBuffer()} matches the bytes
 * that were handed to the deserializer but not yet consumed.
 *
 * @param deserializer deserializer under test
 * @param record record to serialize and round-trip
 * @param segmentSize size of each {@link MemorySegment} used for serialization
 * @param leftOverBytes extra bytes appended after the record completes, to simulate trailing data
 */
public void testUnconsumedBuffer(RecordDeserializer<SerializationTestType> deserializer, SerializationTestType record, int segmentSize, byte... leftOverBytes) throws Exception {
    try (ByteArrayOutputStream unconsumedBytes = new ByteArrayOutputStream()) {
        DataOutputSerializer serializer = new DataOutputSerializer(128);
        ByteBuffer serializedRecord = RecordWriter.serializeRecord(serializer, record);
        BufferAndSerializerResult serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize);
        serializationResult.getBufferBuilder().appendAndCommit(serializedRecord);
        if (serializationResult.getBufferBuilder().isFull()) {
            // buffer is full => start deserializing
            Buffer buffer = serializationResult.buildBuffer();
            writeBuffer(buffer.readOnlySlice().getNioBufferReadable(), unconsumedBytes);
            deserializer.setNextBuffer(buffer);
            assertUnconsumedBuffer(unconsumedBytes, deserializer.getUnconsumedBuffer());
            // Class.newInstance() is deprecated (Java 9+); use the explicit
            // no-arg constructor instead.
            deserializer.getNextRecord(record.getClass().getDeclaredConstructor().newInstance());
            // move buffers as long as necessary (for long records spanning several segments)
            while ((serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize)).isFullBuffer()) {
                buffer = serializationResult.buildBuffer();
                if (serializationResult.isFullRecord()) {
                    // record is complete => append the trailing bytes to exercise left-over handling
                    buffer = appendLeftOverBytes(buffer, leftOverBytes);
                }
                writeBuffer(buffer.readOnlySlice().getNioBufferReadable(), unconsumedBytes);
                deserializer.setNextBuffer(buffer);
                assertUnconsumedBuffer(unconsumedBytes, deserializer.getUnconsumedBuffer());
                deserializer.getNextRecord(record.getClass().getDeclaredConstructor().newInstance());
            }
        }
    }
}
Usage example of org.apache.flink.core.memory.DataOutputSerializer in the Apache Flink project.
From the class CheckpointBarrierTest, method testSerialization:
/**
 * Test serialization of the checkpoint barrier. The checkpoint barrier does not support its own
 * serialization, in order to be immutable.
 */
@Test
public void testSerialization() throws Exception {
    // Values deliberately exceed the int range to exercise the long fields.
    final long checkpointId = Integer.MAX_VALUE + 123123L;
    final long checkpointTimestamp = Integer.MAX_VALUE + 1228L;
    final CheckpointOptions checkpointOptions = CheckpointOptions.forCheckpointWithDefaultLocation();
    final CheckpointBarrier barrier =
            new CheckpointBarrier(checkpointId, checkpointTimestamp, checkpointOptions);

    // Writing a barrier must be rejected.
    try {
        barrier.write(new DataOutputSerializer(1024));
        fail("should throw an exception");
    } catch (UnsupportedOperationException ignored) {
        // expected: barriers do not serialize themselves
    }

    // Reading a barrier must be rejected as well.
    try {
        barrier.read(new DataInputDeserializer(new byte[32]));
        fail("should throw an exception");
    } catch (UnsupportedOperationException ignored) {
        // expected: barriers do not deserialize themselves
    }
}
Aggregations