Search in sources :

Example 6 with SerializationTestType

Use of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.

From the class SpanningRecordSerializerTest, method testHasData.

@Test
public void testHasData() {
    final int segmentSize = 16;
    final SpanningRecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
    final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(segmentSize), mock(BufferRecycler.class));
    final SerializationTestType record = Util.randomRecord(SerializationTestTypeFactory.INT);

    // A freshly created serializer reports no pending data.
    Assert.assertFalse(serializer.hasData());
    try {
        // Adding a record makes data pending.
        serializer.addRecord(record);
        Assert.assertTrue(serializer.hasData());

        // Supplying a target buffer does not discard the pending data.
        serializer.setNextBuffer(buffer);
        Assert.assertTrue(serializer.hasData());

        // Clearing drops everything.
        serializer.clear();
        Assert.assertFalse(serializer.hasData());

        // Data added after a buffer has been set is still reported as pending,
        // even across multiple additions.
        serializer.setNextBuffer(buffer);
        serializer.addRecord(record);
        Assert.assertTrue(serializer.hasData());

        serializer.addRecord(record);
        Assert.assertTrue(serializer.hasData());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used : Buffer(org.apache.flink.runtime.io.network.buffer.Buffer) SerializationTestType(org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType) BufferRecycler(org.apache.flink.runtime.io.network.buffer.BufferRecycler) IOException(java.io.IOException) Test(org.junit.Test)

Example 7 with SerializationTestType

Use of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.

From the class SpanningRecordSerializerTest, method testEmptyRecords.

@Test
public void testEmptyRecords() {
    // Verifies that zero-length records (write() emits no bytes) are handled:
    // each empty record still consumes length-header space in the segment, so
    // an 11-byte segment fills up after a few of them.
    final int SEGMENT_SIZE = 11;
    final SpanningRecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
    final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));
    try {
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, serializer.setNextBuffer(buffer));
    } catch (IOException e) {
        e.printStackTrace();
        // BUG FIX: previously this catch only printed the stack trace, so an
        // IOException during setup let the test pass silently. Fail explicitly.
        Assert.fail(e.getMessage());
    }
    try {
        // A record whose write()/read() are no-ops; all other operations are
        // unsupported because the serializer must not need them for this test.
        SerializationTestType emptyRecord = new SerializationTestType() {

            @Override
            public SerializationTestType getRandom(Random rnd) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int length() {
                throw new UnsupportedOperationException();
            }

            @Override
            public void write(DataOutputView out) {
            }

            @Override
            public void read(DataInputView in) {
            }

            @Override
            public int hashCode() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean equals(Object obj) {
                throw new UnsupportedOperationException();
            }
        };
        // First two empty records fit completely into the segment.
        RecordSerializer.SerializationResult result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);
        result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);
        // The third record's header no longer fits; the segment is full.
        result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.PARTIAL_RECORD_MEMORY_SEGMENT_FULL, result);
        // Moving to the next buffer completes the partially written record.
        result = serializer.setNextBuffer(buffer);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Also used : Buffer(org.apache.flink.runtime.io.network.buffer.Buffer) DataInputView(org.apache.flink.core.memory.DataInputView) DataOutputView(org.apache.flink.core.memory.DataOutputView) IOException(java.io.IOException) IOException(java.io.IOException) SerializationTestType(org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType) Random(java.util.Random) BufferRecycler(org.apache.flink.runtime.io.network.buffer.BufferRecycler) Test(org.junit.Test)

Example 8 with SerializationTestType

Use of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.

From the class DataInputOutputSerializerTest, method testRandomValuesWriteRead.

@Test
public void testRandomValuesWriteRead() {
    // Round-trips 100k random records through DataOutputSerializer (which must
    // grow from its initial 1-byte capacity) and DataInputDeserializer, and
    // checks each deserialized value equals the original, in order.
    final int numElements = 100000;
    final ArrayDeque<SerializationTestType> reference = new ArrayDeque<SerializationTestType>();
    DataOutputSerializer serializer = new DataOutputSerializer(1);
    for (SerializationTestType value : Util.randomRecords(numElements)) {
        reference.add(value);
        try {
            value.write(serializer);
        } catch (IOException e) {
            e.printStackTrace();
            Assert.fail("Test encountered an unexpected exception.");
        }
    }
    DataInputDeserializer deserializer = new DataInputDeserializer(serializer.wrapAsByteBuffer());
    for (SerializationTestType expected : reference) {
        try {
            // Instantiate a fresh record of the same concrete type via its
            // no-arg constructor. getDeclaredConstructor().newInstance()
            // replaces the deprecated Class.newInstance(), which also
            // propagated undeclared checked exceptions from the constructor.
            SerializationTestType actual = expected.getClass().getDeclaredConstructor().newInstance();
            actual.read(deserializer);
            Assert.assertEquals(expected, actual);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Test encountered an unexpected exception.");
        }
    }
    reference.clear();
}
Also used : SerializationTestType(org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType) IOException(java.io.IOException) ArrayDeque(java.util.ArrayDeque) IOException(java.io.IOException) Test(org.junit.Test)

Example 9 with SerializationTestType

Use of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.

From the class LargeRecordsTest, method testHandleMixedLargeRecordsSpillingAdaptiveSerializer.

@Test
public void testHandleMixedLargeRecordsSpillingAdaptiveSerializer() {
    // Round-trips an alternating sequence of small (IntType) and large
    // (LargeObjectType) records through a SpanningRecordSerializer and a
    // spilling deserializer, reusing a single 32 KiB buffer, and verifies
    // every record is reproduced in its original order.
    try {
        final int NUM_RECORDS = 99;
        final int SEGMENT_SIZE = 32 * 1024;
        final RecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
        final RecordDeserializer<SerializationTestType> deserializer = new SpillingAdaptiveSpanningRecordDeserializer<SerializationTestType>(new String[] { System.getProperty("java.io.tmpdir") });
        final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));
        List<SerializationTestType> originalRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);
        List<SerializationTestType> deserializedRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);
        LargeObjectType genLarge = new LargeObjectType();
        Random rnd = new Random();
        // Even indices get a small fixed IntType(42); odd indices get a random
        // large record (presumably larger than one segment, forcing spanning /
        // spilling behavior in the deserializer).
        for (int i = 0; i < NUM_RECORDS; i++) {
            if (i % 2 == 0) {
                originalRecords.add(new IntType(42));
                deserializedRecords.add(new IntType());
            } else {
                originalRecords.add(genLarge.getRandom(rnd));
                deserializedRecords.add(new LargeObjectType());
            }
        }
        // -------------------------------------------------------------------------------------------------------------
        serializer.setNextBuffer(buffer);
        int numRecordsDeserialized = 0;
        for (SerializationTestType record : originalRecords) {
            // serialize record
            if (serializer.addRecord(record).isFullBuffer()) {
                // buffer is full => move to deserializer
                deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                // deserialize records, as many complete as there are
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
                // move buffers as long as necessary (for long records)
                // A single large record can span several segments, so keep
                // handing the (reused) buffer back and forth until the
                // serializer no longer fills it.
                while (serializer.setNextBuffer(buffer).isFullBuffer()) {
                    deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                }
                // deserialize records, as many as there are in the last buffer
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
            }
        }
        // move the last (incomplete buffer)
        // Flush the final partially-filled buffer, sized by its actual content.
        Buffer last = serializer.getCurrentBuffer();
        deserializer.setNextMemorySegment(last.getMemorySegment(), last.getSize());
        serializer.clear();
        // deserialize records, as many as there are in the last buffer
        while (numRecordsDeserialized < deserializedRecords.size()) {
            SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
            assertTrue(deserializer.getNextRecord(next).isFullRecord());
            assertEquals(originalRecords.get(numRecordsDeserialized), next);
            numRecordsDeserialized++;
        }
        // might be that the last big records has not yet been fully moved, and a small one is missing
        assertFalse(serializer.hasData());
        assertFalse(deserializer.hasUnfinishedData());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used : Buffer(org.apache.flink.runtime.io.network.buffer.Buffer) ArrayList(java.util.ArrayList) SpanningRecordSerializer(org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer) IntType(org.apache.flink.runtime.io.network.api.serialization.types.IntType) SerializationTestType(org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType) Random(java.util.Random) SpillingAdaptiveSpanningRecordDeserializer(org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer) LargeObjectType(org.apache.flink.runtime.io.network.serialization.types.LargeObjectType) BufferRecycler(org.apache.flink.runtime.io.network.buffer.BufferRecycler) Test(org.junit.Test)

Aggregations

SerializationTestType (org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType)9 Buffer (org.apache.flink.runtime.io.network.buffer.Buffer)6 BufferRecycler (org.apache.flink.runtime.io.network.buffer.BufferRecycler)6 Test (org.junit.Test)6 IOException (java.io.IOException)4 ArrayList (java.util.ArrayList)3 Random (java.util.Random)3 ArrayDeque (java.util.ArrayDeque)2 SpanningRecordSerializer (org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer)2 SpillingAdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer)2 IntType (org.apache.flink.runtime.io.network.api.serialization.types.IntType)2 LargeObjectType (org.apache.flink.runtime.io.network.serialization.types.LargeObjectType)2 ByteBuffer (java.nio.ByteBuffer)1 DataInputView (org.apache.flink.core.memory.DataInputView)1 DataOutputView (org.apache.flink.core.memory.DataOutputView)1 MemorySegment (org.apache.flink.core.memory.MemorySegment)1 AdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.AdaptiveSpanningRecordDeserializer)1