Example usage of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.
Taken from class SpanningRecordSerializerTest, method testHasData.
/**
 * Verifies that {@code hasData()} tracks pending serialized bytes through the
 * serializer's add-record / set-buffer / clear lifecycle.
 */
@Test
public void testHasData() {
    final int bufferSize = 16;

    final SpanningRecordSerializer<SerializationTestType> recordSerializer =
            new SpanningRecordSerializer<SerializationTestType>();
    final Buffer targetBuffer =
            new Buffer(MemorySegmentFactory.allocateUnpooledSegment(bufferSize), mock(BufferRecycler.class));
    final SerializationTestType intRecord = Util.randomRecord(SerializationTestTypeFactory.INT);

    // A freshly created serializer holds nothing.
    Assert.assertFalse(recordSerializer.hasData());

    try {
        // Adding a record leaves serialized bytes pending.
        recordSerializer.addRecord(intRecord);
        Assert.assertTrue(recordSerializer.hasData());

        // Attaching a target buffer must not discard the pending state.
        recordSerializer.setNextBuffer(targetBuffer);
        Assert.assertTrue(recordSerializer.hasData());

        // clear() drops all pending data.
        recordSerializer.clear();
        Assert.assertFalse(recordSerializer.hasData());

        // Records added after a buffer is already set are also reported.
        recordSerializer.setNextBuffer(targetBuffer);
        recordSerializer.addRecord(intRecord);
        Assert.assertTrue(recordSerializer.hasData());

        recordSerializer.addRecord(intRecord);
        Assert.assertTrue(recordSerializer.hasData());
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example usage of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.
Taken from class SpanningRecordSerializerTest, method testEmptyRecords.
/**
 * Tests serialization of zero-length records into an 11-byte segment.
 * The first two empty records fit completely; the third exhausts the segment
 * (presumably only per-record length prefixes consume space — confirm against
 * SpanningRecordSerializer's wire format) and must report
 * PARTIAL_RECORD_MEMORY_SEGMENT_FULL until a fresh buffer is supplied.
 */
@Test
public void testEmptyRecords() {
    final int SEGMENT_SIZE = 11;

    final SpanningRecordSerializer<SerializationTestType> serializer =
            new SpanningRecordSerializer<SerializationTestType>();
    final Buffer buffer =
            new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));

    try {
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, serializer.setNextBuffer(buffer));
    } catch (IOException e) {
        // FIX: previously this catch only printed the stack trace and let the
        // test continue against an unset buffer; a setup failure must fail
        // the test immediately.
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }

    try {
        // Record stub with an empty payload. All methods the serializer is
        // not expected to call throw, so any unexpected use is surfaced.
        SerializationTestType emptyRecord = new SerializationTestType() {
            @Override
            public SerializationTestType getRandom(Random rnd) {
                throw new UnsupportedOperationException();
            }

            @Override
            public int length() {
                throw new UnsupportedOperationException();
            }

            @Override
            public void write(DataOutputView out) {
                // intentionally writes nothing: this is a zero-length record
            }

            @Override
            public void read(DataInputView in) {
                // nothing to read back
            }

            @Override
            public int hashCode() {
                throw new UnsupportedOperationException();
            }

            @Override
            public boolean equals(Object obj) {
                throw new UnsupportedOperationException();
            }
        };

        RecordSerializer.SerializationResult result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);

        result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);

        // The third empty record no longer fits into the 11-byte segment.
        result = serializer.addRecord(emptyRecord);
        Assert.assertEquals(RecordSerializer.SerializationResult.PARTIAL_RECORD_MEMORY_SEGMENT_FULL, result);

        // Supplying a fresh buffer lets the pending record complete.
        result = serializer.setNextBuffer(buffer);
        Assert.assertEquals(RecordSerializer.SerializationResult.FULL_RECORD, result);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
Example usage of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.
Taken from class DataInputOutputSerializerTest, method testRandomValuesWriteRead.
/**
 * Round-trips 100,000 random records through a {@code DataOutputSerializer}
 * and a {@code DataInputDeserializer}, checking that every deserialized value
 * equals the original it was written from.
 */
@Test
public void testRandomValuesWriteRead() {
    final int numElements = 100000;
    final ArrayDeque<SerializationTestType> written = new ArrayDeque<SerializationTestType>();

    // Backing array starts at a single byte — presumably the serializer grows
    // it on demand, so the growth path is exercised heavily here.
    DataOutputSerializer out = new DataOutputSerializer(1);
    for (SerializationTestType record : Util.randomRecords(numElements)) {
        written.add(record);
        try {
            record.write(out);
        } catch (IOException e) {
            e.printStackTrace();
            Assert.fail("Test encountered an unexpected exception.");
        }
    }

    // Read everything back in write order and compare element by element.
    DataInputDeserializer in = new DataInputDeserializer(out.wrapAsByteBuffer());
    for (SerializationTestType expected : written) {
        try {
            SerializationTestType actual = expected.getClass().newInstance();
            actual.read(in);
            Assert.assertEquals(expected, actual);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail("Test encountered an unexpected exception.");
        }
    }

    written.clear();
}
Example usage of org.apache.flink.runtime.io.network.api.serialization.types.SerializationTestType in the Apache Flink project.
Taken from class LargeRecordsTest, method testHandleMixedLargeRecordsSpillingAdaptiveSerializer.
/**
 * Serialize/deserialize round trip that alternates small {@code IntType}
 * records with large multi-segment {@code LargeObjectType} records, driving
 * the spilling adaptive deserializer. Verifies that every record survives the
 * trip intact and that neither side retains leftover data at the end.
 */
@Test
public void testHandleMixedLargeRecordsSpillingAdaptiveSerializer() {
    try {
        final int NUM_RECORDS = 99;
        final int SEGMENT_SIZE = 32 * 1024;

        final RecordSerializer<SerializationTestType> serializer = new SpanningRecordSerializer<SerializationTestType>();
        // The deserializer is configured to spill over-sized records to the
        // system temp directory.
        final RecordDeserializer<SerializationTestType> deserializer = new SpillingAdaptiveSpanningRecordDeserializer<SerializationTestType>(new String[] { System.getProperty("java.io.tmpdir") });
        final Buffer buffer = new Buffer(MemorySegmentFactory.allocateUnpooledSegment(SEGMENT_SIZE), mock(BufferRecycler.class));

        List<SerializationTestType> originalRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);
        List<SerializationTestType> deserializedRecords = new ArrayList<>((NUM_RECORDS + 1) / 2);

        LargeObjectType genLarge = new LargeObjectType();
        Random rnd = new Random();

        // Alternate record sizes: even indices get a small fixed int record,
        // odd indices get a random large record. The parallel list holds
        // empty target instances for deserialization.
        for (int i = 0; i < NUM_RECORDS; i++) {
            if (i % 2 == 0) {
                originalRecords.add(new IntType(42));
                deserializedRecords.add(new IntType());
            } else {
                originalRecords.add(genLarge.getRandom(rnd));
                deserializedRecords.add(new LargeObjectType());
            }
        }

        // -------------------------------------------------------------------------------------------------------------

        serializer.setNextBuffer(buffer);

        // Count of records fully read back so far; also indexes both lists.
        int numRecordsDeserialized = 0;
        for (SerializationTestType record : originalRecords) {
            // serialize record
            if (serializer.addRecord(record).isFullBuffer()) {
                // buffer is full => move to deserializer
                deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);

                // deserialize records, as many complete as there are
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }

                // move buffers as long as necessary (for long records)
                while (serializer.setNextBuffer(buffer).isFullBuffer()) {
                    deserializer.setNextMemorySegment(serializer.getCurrentBuffer().getMemorySegment(), SEGMENT_SIZE);
                }

                // deserialize records, as many as there are in the last buffer
                while (numRecordsDeserialized < deserializedRecords.size()) {
                    SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
                    if (deserializer.getNextRecord(next).isFullRecord()) {
                        assertEquals(originalRecords.get(numRecordsDeserialized), next);
                        numRecordsDeserialized++;
                    } else {
                        break;
                    }
                }
            }
        }

        // move the last (incomplete buffer)
        // Note: only the filled portion (last.getSize()) is handed over here,
        // not the full SEGMENT_SIZE as in the loop above.
        Buffer last = serializer.getCurrentBuffer();
        deserializer.setNextMemorySegment(last.getMemorySegment(), last.getSize());
        serializer.clear();

        // deserialize records, as many as there are in the last buffer
        while (numRecordsDeserialized < deserializedRecords.size()) {
            SerializationTestType next = deserializedRecords.get(numRecordsDeserialized);
            assertTrue(deserializer.getNextRecord(next).isFullRecord());
            assertEquals(originalRecords.get(numRecordsDeserialized), next);
            numRecordsDeserialized++;
        }

        // might be that the last big records has not yet been fully moved, and a small one is missing
        assertFalse(serializer.hasData());
        assertFalse(deserializer.hasUnfinishedData());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Aggregations