
Example 51 with DataOutputSerializer

use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

the class StreamElementSerializerTest method serializeAndDeserialize.

@SuppressWarnings("unchecked")
private static <T, X extends StreamElement> X serializeAndDeserialize(X record, StreamElementSerializer<T> serializer) throws IOException {
    DataOutputSerializer output = new DataOutputSerializer(32);
    serializer.serialize(record, output);
    // additional binary copy step
    DataInputDeserializer copyInput = new DataInputDeserializer(output.getByteArray(), 0, output.length());
    DataOutputSerializer copyOutput = new DataOutputSerializer(32);
    serializer.copy(copyInput, copyOutput);
    DataInputDeserializer input = new DataInputDeserializer(copyOutput.getByteArray(), 0, copyOutput.length());
    return (X) serializer.deserialize(input);
}
Also used : DataOutputSerializer(org.apache.flink.core.memory.DataOutputSerializer) DataInputDeserializer(org.apache.flink.core.memory.DataInputDeserializer)
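
For reference, the same write/read classes can be exercised without a StreamElementSerializer. Below is a minimal round-trip sketch (not from the Flink sources) using StringSerializer (org.apache.flink.api.common.typeutils.base.StringSerializer); it relies on getCopyOfBuffer(), which returns a copy of exactly the bytes written so far:

private static String roundTrip(String value) throws IOException {
    StringSerializer stringSerializer = StringSerializer.INSTANCE;
    // write the value into a growable, in-memory byte buffer
    DataOutputSerializer output = new DataOutputSerializer(32);
    stringSerializer.serialize(value, output);
    // read it back from a copy of the written bytes
    DataInputDeserializer input = new DataInputDeserializer(output.getCopyOfBuffer());
    return stringSerializer.deserialize(input);
}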

Example 52 with DataOutputSerializer

use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

the class SpanningWrapper method copyDataBuffer.

private MemorySegment copyDataBuffer() throws IOException {
    int leftOverSize = leftOverLimit - leftOverStart;
    int unconsumedSize = LENGTH_BYTES + accumulatedRecordBytes + leftOverSize;
    DataOutputSerializer serializer = new DataOutputSerializer(unconsumedSize);
    // layout: length prefix, then the accumulated record bytes, then any left-over bytes
    serializer.writeInt(recordLength);
    serializer.write(buffer, 0, accumulatedRecordBytes);
    if (leftOverData != null) {
        serializer.write(leftOverData, leftOverStart, leftOverSize);
    }
    MemorySegment segment = MemorySegmentFactory.allocateUnpooledSegment(unconsumedSize);
    segment.put(0, serializer.getSharedBuffer(), 0, segment.size());
    return segment;
}
Also used : DataOutputSerializer(org.apache.flink.core.memory.DataOutputSerializer) MemorySegment(org.apache.flink.core.memory.MemorySegment)
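
The segment built above uses a simple layout: a length prefix of LENGTH_BYTES (holding the expected full record length), followed by the bytes accumulated so far and any left-over bytes. A minimal sketch (not from the Flink sources, and assuming LENGTH_BYTES is the 4-byte int written by writeInt) of reading that layout back with a DataInputDeserializer:

private static byte[] readUnconsumed(MemorySegment segment) throws IOException {
    // copy the segment contents into a heap array
    byte[] bytes = new byte[segment.size()];
    segment.get(0, bytes);
    DataInputDeserializer input = new DataInputDeserializer(bytes);
    // the prefix holds the full record length, which may exceed the
    // payload below when the record spans multiple buffers
    int recordLength = input.readInt();
    byte[] payload = new byte[bytes.length - Integer.BYTES];
    input.readFully(payload);
    return payload;
}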

Example 53 with DataOutputSerializer

use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

the class SpanningRecordSerializationTest method testSerializationRoundTrip.

/**
 * Iterates over the provided records and tests whether {@link RecordWriter#serializeRecord} and
 * {@link RecordDeserializer} interact as expected.
 *
 * <p>Only a single {@link MemorySegment} will be allocated.
 *
 * @param records records to test
 * @param segmentSize size for the {@link MemorySegment}
 */
private static void testSerializationRoundTrip(Iterable<SerializationTestType> records, int segmentSize, RecordDeserializer<SerializationTestType> deserializer) throws Exception {
    final DataOutputSerializer serializer = new DataOutputSerializer(128);
    final ArrayDeque<SerializationTestType> serializedRecords = new ArrayDeque<>();
    // -------------------------------------------------------------------------------------------------------------
    BufferAndSerializerResult serializationResult = setNextBufferForSerializer(serializer.wrapAsByteBuffer(), segmentSize);
    int numRecords = 0;
    for (SerializationTestType record : records) {
        serializedRecords.add(record);
        numRecords++;
        // serialize record
        serializer.clear();
        ByteBuffer serializedRecord = RecordWriter.serializeRecord(serializer, record);
        serializationResult.getBufferBuilder().appendAndCommit(serializedRecord);
        if (serializationResult.getBufferBuilder().isFull()) {
            // buffer is full => start deserializing
            deserializer.setNextBuffer(serializationResult.buildBuffer());
            numRecords -= DeserializationUtils.deserializeRecords(serializedRecords, deserializer);
            // move buffers as long as necessary (for long records)
            while ((serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize)).isFullBuffer()) {
                deserializer.setNextBuffer(serializationResult.buildBuffer());
            }
        }
        Assert.assertFalse(serializedRecord.hasRemaining());
    }
    // deserialize left over records
    deserializer.setNextBuffer(serializationResult.buildBuffer());
    while (!serializedRecords.isEmpty()) {
        SerializationTestType expected = serializedRecords.poll();
        SerializationTestType actual = expected.getClass().newInstance();
        RecordDeserializer.DeserializationResult result = deserializer.getNextRecord(actual);
        Assert.assertTrue(result.isFullRecord());
        Assert.assertEquals(expected, actual);
        numRecords--;
    }
    // assert that all records have been serialized and deserialized
    Assert.assertEquals(0, numRecords);
}
Also used : DataOutputSerializer(org.apache.flink.core.memory.DataOutputSerializer) SerializationTestType(org.apache.flink.testutils.serialization.types.SerializationTestType) ByteBuffer(java.nio.ByteBuffer) ArrayDeque(java.util.ArrayDeque)
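
The setNextBufferForSerializer helper and BufferAndSerializerResult type are test-local and not shown in this snippet. The producer-side step they wrap can be sketched roughly as follows (assumptions: a BufferBuilder over a single unpooled MemorySegment recycled by FreeingBufferRecycler; note that the real RecordWriter.serializeRecord also prepends a 4-byte length header before the record bytes):

private static BufferBuilder startNewBuffer(DataOutputSerializer serializer, int segmentSize) {
    // back the builder with a fresh unpooled segment
    BufferBuilder bufferBuilder = new BufferBuilder(
            MemorySegmentFactory.allocateUnpooledSegment(segmentSize),
            FreeingBufferRecycler.INSTANCE);
    // copy as many of the serialized bytes as fit into the new buffer
    bufferBuilder.appendAndCommit(serializer.wrapAsByteBuffer());
    return bufferBuilder;
}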

Example 54 with DataOutputSerializer

use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

the class SpanningRecordSerializationTest method testUnconsumedBuffer.

public void testUnconsumedBuffer(RecordDeserializer<SerializationTestType> deserializer, SerializationTestType record, int segmentSize, byte... leftOverBytes) throws Exception {
    try (ByteArrayOutputStream unconsumedBytes = new ByteArrayOutputStream()) {
        DataOutputSerializer serializer = new DataOutputSerializer(128);
        ByteBuffer serializedRecord = RecordWriter.serializeRecord(serializer, record);
        BufferAndSerializerResult serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize);
        serializationResult.getBufferBuilder().appendAndCommit(serializedRecord);
        if (serializationResult.getBufferBuilder().isFull()) {
            // buffer is full => start deserializing
            Buffer buffer = serializationResult.buildBuffer();
            writeBuffer(buffer.readOnlySlice().getNioBufferReadable(), unconsumedBytes);
            deserializer.setNextBuffer(buffer);
            assertUnconsumedBuffer(unconsumedBytes, deserializer.getUnconsumedBuffer());
            deserializer.getNextRecord(record.getClass().newInstance());
            // move buffers as long as necessary (for long records)
            while ((serializationResult = setNextBufferForSerializer(serializedRecord, segmentSize)).isFullBuffer()) {
                buffer = serializationResult.buildBuffer();
                if (serializationResult.isFullRecord()) {
                    buffer = appendLeftOverBytes(buffer, leftOverBytes);
                }
                writeBuffer(buffer.readOnlySlice().getNioBufferReadable(), unconsumedBytes);
                deserializer.setNextBuffer(buffer);
                assertUnconsumedBuffer(unconsumedBytes, deserializer.getUnconsumedBuffer());
                deserializer.getNextRecord(record.getClass().newInstance());
            }
        }
    }
}
Also used : ByteBuffer(java.nio.ByteBuffer) Buffer(org.apache.flink.runtime.io.network.buffer.Buffer) BufferBuilderTestUtils.buildSingleBuffer(org.apache.flink.runtime.io.network.buffer.BufferBuilderTestUtils.buildSingleBuffer) DataOutputSerializer(org.apache.flink.core.memory.DataOutputSerializer) ByteArrayOutputStream(java.io.ByteArrayOutputStream)
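
The writeBuffer helper called above is not part of the snippet. A minimal version (hypothetical, reconstructed from the call site, which passes a read-only NIO view and the ByteArrayOutputStream) could look like:

private static void writeBuffer(ByteBuffer buffer, ByteArrayOutputStream out) {
    // drain all remaining bytes of the NIO view into the stream
    while (buffer.hasRemaining()) {
        out.write(buffer.get());
    }
}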

Example 55 with DataOutputSerializer

use of org.apache.flink.core.memory.DataOutputSerializer in project flink by apache.

the class CheckpointBarrierTest method testSerialization.

/**
 * Tests serialization of the checkpoint barrier. The checkpoint barrier deliberately does not
 * support its own serialization, so that it can remain immutable.
 */
@Test
public void testSerialization() throws Exception {
    long id = Integer.MAX_VALUE + 123123L;
    long timestamp = Integer.MAX_VALUE + 1228L;
    CheckpointOptions options = CheckpointOptions.forCheckpointWithDefaultLocation();
    CheckpointBarrier barrier = new CheckpointBarrier(id, timestamp, options);
    try {
        barrier.write(new DataOutputSerializer(1024));
        fail("should throw an exception");
    } catch (UnsupportedOperationException e) {
    // expected
    }
    try {
        barrier.read(new DataInputDeserializer(new byte[32]));
        fail("should throw an exception");
    } catch (UnsupportedOperationException e) {
    // expected
    }
}
Also used : DataOutputSerializer(org.apache.flink.core.memory.DataOutputSerializer) CheckpointOptions(org.apache.flink.runtime.checkpoint.CheckpointOptions) DataInputDeserializer(org.apache.flink.core.memory.DataInputDeserializer) Test(org.junit.Test)
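
Because CheckpointBarrier deliberately rejects write() and read(), barriers travel through the network stack via a separate utility. A sketch of that path (assuming the EventSerializer class in org.apache.flink.runtime.io.network.api.serialization; exact signatures can vary between Flink versions):

CheckpointBarrier barrier = new CheckpointBarrier(id, timestamp, options);
// events are serialized externally by EventSerializer, not by the event itself
ByteBuffer serialized = EventSerializer.toSerializedEvent(barrier);
AbstractEvent restored =
        EventSerializer.fromSerializedEvent(serialized, CheckpointBarrier.class.getClassLoader());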

Aggregations

DataOutputSerializer (org.apache.flink.core.memory.DataOutputSerializer): 63
DataInputDeserializer (org.apache.flink.core.memory.DataInputDeserializer): 15
Test (org.junit.Test): 15
IOException (java.io.IOException): 10
ByteBuffer (java.nio.ByteBuffer): 6
List (java.util.List): 4
IntSerializer (org.apache.flink.api.common.typeutils.base.IntSerializer): 4
StringSerializer (org.apache.flink.api.common.typeutils.base.StringSerializer): 4
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 4
ArrayList (java.util.ArrayList): 3
IntStream (java.util.stream.IntStream): 3
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 3
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 3
Comparator (java.util.Comparator): 2
HashMap (java.util.HashMap): 2
Map (java.util.Map): 2
Function (java.util.function.Function): 2
Stream (java.util.stream.Stream): 2
ValueState (org.apache.flink.api.common.state.ValueState): 2
ValueStateDescriptor (org.apache.flink.api.common.state.ValueStateDescriptor): 2