
Example 1 with LargeObjectType

Use of org.apache.flink.runtime.io.network.serialization.types.LargeObjectType in the Apache Flink project.

Class: LargeRecordsTest. Method: testHandleMixedLargeRecords.

@Test
public void testHandleMixedLargeRecords() {
    try {
        final int NUM_RECORDS = 99;
        final int SEGMENT_SIZE = 32 * 1024;
        final RecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
        final RecordDeserializer<SerializationTestType> deserializer = new AdaptiveSpanningRecordDeserializer<SerializationTestType>();
        final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));
        List<SerializationTestType> originalRecords = new ArrayList<SerializationTestType>((NUM_RECORDS + 1) / 2);
        List<SerializationTestType> deserializedRecords = new ArrayList<SerializationTestType>((NUM_RECORDS + 1) / 2);
        LargeObjectType genLarge = new LargeObjectType();
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            if (i % 2 == 0) {
                originalRecords.add(new IntType(42));
                deserializedRecords.add(new IntType());
            } else {
                originalRecords.add(genLarge.getRandom(rnd));
                deserializedRecords.add(new LargeObjectType());
            }
        }
        // -------------------------------------------------------------------------------------------------------------
        serializer.setNextBuffer(buffer);
        int numRecordsDeserialized = 0;
        for (SerializationTestType record : originalRecords) {
            // serialize record
            if (serializer.addRecord(record).isFullBuffer()) {
                // buffer is full => move to deserializer
                deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                // deserialize as many complete records as are available
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
                // move buffers as long as necessary (for long records)
                while (serializer.setNextBuffer(buffer).isFullBuffer()) {
                    deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                }
                // deserialize records, as many as there are in the last buffer
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
            }
        }
        // move the last (incomplete) buffer
        Buffer last = serializer.getCurrentBuffer();
        deserializer.setNextMemorySegment(last.getMemorySegment(), last.getSize());
        serializer.clear();
        // deserialize records, as many as there are in the last buffer
        while (numRecordsDeserialized < deserializedRecords.size()) {
            SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
            assertTrue(deserializer.getNextRecord(next).isFullRecord());
            assertEquals(originalRecords.get(numRecordsDeserialized), next);
            numRecordsDeserialized++;
        }
        // it might be that the last big record has not yet been fully moved, and a small one is missing
        assertFalse(serializer.hasData());
        assertFalse(deserializer.hasUnfinishedData());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used:
Buffer (org.apache.flink.runtime.io.network.buffer.Buffer)
SpillingAdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer)
AdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.AdaptiveSpanningRecordDeserializer)
ArrayList (java.util.ArrayList)
SpanningRecordSerializer (org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer)
IntType (org.apache.flink.runtime.io.network.api.serialization.types.IntType)
SerializationTestType (org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType)
Random (java.util.Random)
LargeObjectType (org.apache.flink.runtime.io.network.serialization.types.LargeObjectType)
BufferRecycler (org.apache.flink.runtime.io.network.buffer.BufferRecycler)
Test (org.junit.Test)
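
For readers unfamiliar with what a "spanning" record is, the following standalone sketch illustrates the mechanism the test above exercises: records are length-prefixed and written into fixed 32 KiB segments, so a record larger than one segment spans several segments and can only be emitted by the reader once all of its segments have arrived. This is plain Java for illustration only; the class name, constant names, and int-length framing are invented for the sketch and make no claim about Flink's actual wire format or its MemorySegment/Buffer abstractions.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;

public class SpanningRecordSketch {

    static final int SEGMENT_SIZE = 32 * 1024;

    public static void main(String[] args) throws IOException {
        Random rnd = new Random();
        byte[] small = new byte[] { 42 };                   // stand-in for IntType(42)
        byte[] large = new byte[5 * SEGMENT_SIZE + 123];    // stand-in for a LargeObjectType value
        rnd.nextBytes(large);

        // "serializer" side: frame each record as [int length][payload] on one stream
        ByteArrayOutputStream wire = new ByteArrayOutputStream();
        for (byte[] record : new byte[][] { small, large, small }) {
            wire.write(ByteBuffer.allocate(4).putInt(record.length).array());
            wire.write(record);
        }
        byte[] bytes = wire.toByteArray();

        // "deserializer" side: consume fixed-size segments, emitting only complete records
        ByteArrayOutputStream pending = new ByteArrayOutputStream(); // spanning state across segments
        int fullRecords = 0;
        for (int pos = 0; pos < bytes.length; pos += SEGMENT_SIZE) {
            int len = Math.min(SEGMENT_SIZE, bytes.length - pos);
            pending.write(bytes, pos, len);                  // analogous to setNextMemorySegment(...)
            ByteBuffer view = ByteBuffer.wrap(pending.toByteArray());
            while (view.remaining() >= 4) {
                view.mark();
                int recordLen = view.getInt();
                if (view.remaining() < recordLen) {          // not a full record yet: wait for more segments
                    view.reset();
                    break;
                }
                view.position(view.position() + recordLen);  // "read" the record
                fullRecords++;                               // analogous to isFullRecord()
            }
            // keep only the unfinished tail for the next segment
            ByteArrayOutputStream rest = new ByteArrayOutputStream();
            rest.write(view.array(), view.position(), view.remaining());
            pending = rest;
        }
        System.out.println("complete records: " + fullRecords);  // prints 3
    }
}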

Example 2 with LargeObjectType

Use of org.apache.flink.runtime.io.network.serialization.types.LargeObjectType in the Apache Flink project.

Class: SpanningRecordSerializationTest. Method: testHandleMixedLargeRecords.

@Test
public void testHandleMixedLargeRecords() throws Exception {
    final int numValues = 99;
    final int segmentSize = 32 * 1024;
    List<SerializationTestType> originalRecords = new ArrayList<>((numValues + 1) / 2);
    LargeObjectType genLarge = new LargeObjectType();
    Random rnd = new Random();
    for (int i = 0; i < numValues; i++) {
        if (i % 2 == 0) {
            originalRecords.add(new IntType(42));
        } else {
            originalRecords.add(genLarge.getRandom(rnd));
        }
    }
    testSerializationRoundTrip(originalRecords, segmentSize);
}
Also used:
SerializationTestType (org.apache.flink.testutils.serialization.types.SerializationTestType)
Random (java.util.Random)
LargeObjectType (org.apache.flink.runtime.io.network.serialization.types.LargeObjectType)
ArrayList (java.util.ArrayList)
IntType (org.apache.flink.testutils.serialization.types.IntType)
Test (org.junit.Test)
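
The testSerializationRoundTrip helper itself is not reproduced on this page. Judging only from the explicit loops in Examples 1 and 3, it presumably performs the same serialize / hand-over / drain cycle and ends with the same checks, along these lines (the real helper in SpanningRecordSerializationTest may be structured differently):

// presumed end-state checks, copied from the loops in Examples 1 and 3
assertEquals(originalRecords.get(numRecordsDeserialized), next); // every record reads back equal to its original
assertFalse(serializer.hasData());                               // the serializer is fully drained
assertFalse(deserializer.hasUnfinishedData());                   // no partially read record is left behind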

Example 3 with LargeObjectType

Use of org.apache.flink.runtime.io.network.serialization.types.LargeObjectType in the Apache Flink project.

Class: LargeRecordsTest. Method: testHandleMixedLargeRecordsSpillingAdaptiveSerializer.

@Test
public void testHandleMixedLargeRecordsSpillingAdaptiveSerializer() {
    try {
        final int NUM_RECORDS = 99;
        final int SEGMENT_SIZE = 32 * 1024;
        final RecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
        final RecordDeserializer<SerializationTestType> deserializer = new SpillingAdaptiveSpanningRecordDeserializer<SerializationTestType>(new String[] { System.getProperty("java.io.tmpdir") });
        final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));
        List<SerializationTestType> originalRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);
        List<SerializationTestType> deserializedRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);
        LargeObjectType genLarge = new LargeObjectType();
        Random rnd = new Random();
        for (int i = 0; i < NUM_RECORDS; i++) {
            if (i % 2 == 0) {
                originalRecords.add(new IntType(42));
                deserializedRecords.add(new IntType());
            } else {
                originalRecords.add(genLarge.getRandom(rnd));
                deserializedRecords.add(new LargeObjectType());
            }
        }
        // -------------------------------------------------------------------------------------------------------------
        serializer.setNextBuffer(buffer);
        int numRecordsDeserialized = 0;
        for (SerializationTestType record : originalRecords) {
            // serialize record
            if (serializer.addRecord(record).isFullBuffer()) {
                // buffer is full => move to deserializer
                deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                // deserialize as many complete records as are available
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
                // move buffers as long as necessary (for long records)
                while (serializer.setNextBuffer(buffer).isFullBuffer()) {
                    deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                }
                // deserialize records, as many as there are in the last buffer
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
            }
        }
        // move the last (incomplete) buffer
        Buffer last = serializer.getCurrentBuffer();
        deserializer.setNextMemorySegment(last.getMemorySegment(), last.getSize());
        serializer.clear();
        // deserialize records, as many as there are in the last buffer
        while (numRecordsDeserialized < deserializedRecords.size()) {
            SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
            assertTrue(deserializer.getNextRecord(next).isFullRecord());
            assertEquals(originalRecords.get(numRecordsDeserialized), next);
            numRecordsDeserialized++;
        }
        // it might be that the last big record has not yet been fully moved, and a small one is missing
        assertFalse(serializer.hasData());
        assertFalse(deserializer.hasUnfinishedData());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Also used:
Buffer (org.apache.flink.runtime.io.network.buffer.Buffer)
ArrayList (java.util.ArrayList)
SpanningRecordSerializer (org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer)
IntType (org.apache.flink.runtime.io.network.api.serialization.types.IntType)
SerializationTestType (org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType)
Random (java.util.Random)
SpillingAdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer)
LargeObjectType (org.apache.flink.runtime.io.network.serialization.types.LargeObjectType)
BufferRecycler (org.apache.flink.runtime.io.network.buffer.BufferRecycler)
Test (org.junit.Test)
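
The only substantive difference between Example 1 and Example 3 is the deserializer; the serialization loop and assertions are identical. Example 1 reassembles spanning records with the purely in-memory variant, while Example 3 uses the spilling variant, which is constructed with one or more temporary directories. (That it can spill oversized records to disk is an inference from the class name, not something shown on this page.)

// Example 1: in-memory reassembly of spanning records
RecordDeserializer<SerializationTestType> inMemory =
        new AdaptiveSpanningRecordDeserializer<SerializationTestType>();

// Example 3: spilling variant, given temp directories it may use for very large records
RecordDeserializer<SerializationTestType> spilling =
        new SpillingAdaptiveSpanningRecordDeserializer<SerializationTestType>(
                new String[] { System.getProperty("java.io.tmpdir") });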

Aggregations

ArrayList (java.util.ArrayList): 3 uses
Random (java.util.Random): 3 uses
LargeObjectType (org.apache.flink.runtime.io.network.serialization.types.LargeObjectType): 3 uses
Test (org.junit.Test): 3 uses
SpanningRecordSerializer (org.apache.flink.runtime.io.network.api.serialization.SpanningRecordSerializer): 2 uses
SpillingAdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.SpillingAdaptiveSpanningRecordDeserializer): 2 uses
IntType (org.apache.flink.runtime.io.network.api.serialization.types.IntType): 2 uses
SerializationTestType (org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType): 2 uses
Buffer (org.apache.flink.runtime.io.network.buffer.Buffer): 2 uses
BufferRecycler (org.apache.flink.runtime.io.network.buffer.BufferRecycler): 2 uses
AdaptiveSpanningRecordDeserializer (org.apache.flink.runtime.io.network.api.serialization.AdaptiveSpanningRecordDeserializer): 1 use
IntType (org.apache.flink.testutils.serialization.types.IntType): 1 use
SerializationTestType (org.apache.flink.testutils.serialization.types.SerializationTestType): 1 use