Search in sources:

Example 11 with SerializationException

Use of io.pravega.common.io.SerializationException in the pravega project.

From the class RevisionDataOutputStreamTests, method testNonSeekableOutputShorterLength.

/**
 * Tests the NonSeekableRevisionDataOutput class when we provide a shorter length than expected.
 */
@Test
public void testNonSeekableOutputShorterLength() throws Exception {
    @Cleanup val baseStream = new ByteArrayOutputStream();
    // Wrap the stream, but do not auto-close it since we expect close() to fail, which is verified below.
    val output = RevisionDataOutputStream.wrap(baseStream);
    // Declare one byte fewer than the Byte + Short + Integer payload we are about to write.
    int declaredLength = Byte.BYTES + Short.BYTES + Integer.BYTES;
    output.length(declaredLength - 1);
    output.writeByte(1);
    output.writeShort(2);
    output.writeInt(3);
    // Verify close() fails.
    AssertExtensions.assertThrows("RevisionDataOutputStream.close() did not throw for byte mismatch.", output::close, ex -> ex instanceof SerializationException);
    // Verify the written data cannot be read back (we'll get an EOF at this time).
    @Cleanup val reader = RevisionDataInputStream.wrap(new ByteArrayInputStream(baseStream.toByteArray()));
    reader.readByte();
    reader.readShort();
    AssertExtensions.assertThrows("Expecting EOF.", reader::readInt, ex -> ex instanceof EOFException);
}
Also used : lombok.val(lombok.val) SerializationException(io.pravega.common.io.SerializationException) ByteArrayInputStream(java.io.ByteArrayInputStream) EOFException(java.io.EOFException) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Cleanup(lombok.Cleanup) Test(org.junit.Test)

Example 12 with SerializationException

Use of io.pravega.common.io.SerializationException in the pravega project.

From the class AsyncTableEntryReaderTests, method testReadKeyResultTooShort.

/**
 * Tests the ability to handle a case where the key could not be read before the read result was done.
 */
@Test
public void testReadKeyResultTooShort() {
    for (val item : generateTestItems()) {
        // Start a new reader & processor for this key-serialization pair.
        val reader = AsyncTableEntryReader.readKey(1L, SERIALIZER, new TimeoutTimer(TIMEOUT));
        // Truncate the read result by one byte so the key cannot be read in full.
        @Cleanup val readResult = new ReadResultMock(item.serialization, item.key.length - 1, 1);
        AsyncReadResultProcessor.process(readResult, reader, executorService());
        AssertExtensions.assertThrows("Unexpected behavior for shorter read result.", () -> reader.getResult().get(BASE_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS), ex -> ex instanceof SerializationException);
    }
}
Also used : lombok.val(lombok.val) SerializationException(io.pravega.common.io.SerializationException) ReadResultMock(io.pravega.segmentstore.server.ReadResultMock) Cleanup(lombok.Cleanup) TimeoutTimer(io.pravega.common.TimeoutTimer) Test(org.junit.Test)

Example 13 with SerializationException

Use of io.pravega.common.io.SerializationException in the pravega project.

From the class FixedKeyLengthTableCompactorTests, method testCompactionConcurrentUpdate.

/**
 * Tests the case when a compaction executes concurrently with one of compact-copied keys being updated. This is a
 * scenario specific to the Fixed-Key-Length Table Segment as the indexing is done at the time of the update
 * and not in the background (and hence in sequence).
 */
@Test
public void testCompactionConcurrentUpdate() {
    @Cleanup val context = createContext(KEY_COUNT * UPDATE_ENTRY_LENGTH);
    // Fixed seed so the generated keys (and hence the test) are deterministic.
    val rnd = new Random(0);
    // Generate keys.
    val keys = new ArrayList<BufferView>();
    val expectedValues = new HashMap<BufferView, BufferView>();
    for (int i = 0; i < KEY_COUNT; i++) {
        byte[] key = new byte[KEY_LENGTH];
        rnd.nextBytes(key);
        keys.add(new ByteArraySegment(key));
    }
    // Set utilization threshold to 76% so that we may trigger a compaction when we update half the keys once.
    // NOTE(review): the first four arguments are presumably offsets/counts reset to zero — confirm against setSegmentState.
    context.setSegmentState(0, 0, 0, 0, 76);
    // Insert all the keys ...
    for (val key : keys) {
        expectedValues.put(key, updateKey(key, context, rnd));
    }
    // ... then update the second half. This should require a compaction which results in a copy.
    for (int i = keys.size() / 2; i < keys.size(); i++) {
        expectedValues.put(keys.get(i), updateKey(keys.get(i), context, rnd));
    }
    val originalLength = context.segmentMetadata.getLength();
    val c = context.getCompactor();
    Assert.assertEquals("Unexpected number of unique entries pre-compaction.", expectedValues.size(), (long) c.getUniqueEntryCount().join());
    // Total = KEY_COUNT inserts + KEY_COUNT/2 re-updates; unique stays at KEY_COUNT.
    Assert.assertEquals("Unexpected number of total entries pre-compaction.", expectedValues.size() + expectedValues.size() / 2, IndexReader.getTotalEntryCount(context.segmentMetadata));
    Assert.assertTrue("Expecting a compaction to be required.", c.isCompactionRequired().join());
    context.segment.setBeforeAppendCallback(() -> {
        // This callback is invoked while the compactor is running; it is after it has read and processed all candidates
        // and immediately before the conditional append it performs is about to be executed.
        // We can now update one of the keys that are copied with a new value.
        // Make sure we don't end up in an infinite loop here.
        context.segment.setBeforeAppendCallback(null);
        val firstKey = keys.get(0);
        expectedValues.put(firstKey, updateKey(firstKey, context, rnd));
    });
    // Run the compaction; the callback above fires mid-compaction, simulating the concurrent update.
    c.compact(new TimeoutTimer(TIMEOUT)).join();
    // We should now verify that the compaction did eventually succeed and that all the keys have the correct (expected) values.
    AssertExtensions.assertGreaterThan("Segment length did not change.", originalLength, context.segmentMetadata.getLength());
    AssertExtensions.assertGreaterThan("No compaction occurred.", 0, IndexReader.getCompactionOffset(context.segmentMetadata));
    Assert.assertEquals("Unexpected number of unique entries post-compaction.", expectedValues.size(), (long) c.getUniqueEntryCount().join());
    Assert.assertEquals("Unexpected number of total entries post-compaction.", expectedValues.size(), IndexReader.getTotalEntryCount(context.segmentMetadata));
    // Read all the entries from the segment and validate that they are as expected.
    val actualValues = new HashMap<BufferView, BufferView>();
    // Iterate over the full fixed-key-length attribute id range; each attribute value is the entry's segment offset.
    context.segment.attributeIterator(AttributeId.Variable.minValue(KEY_LENGTH), AttributeId.Variable.maxValue(KEY_LENGTH), TIMEOUT).join().forEachRemaining(attributeValues -> {
        for (val av : attributeValues) {
            val reader = BufferView.wrap(context.segment.read(av.getValue(), UPDATE_ENTRY_LENGTH, TIMEOUT).readRemaining(UPDATE_ENTRY_LENGTH, TIMEOUT)).getBufferViewReader();
            try {
                val e = AsyncTableEntryReader.readEntryComponents(reader, av.getValue(), context.serializer);
                Assert.assertEquals("Mismatch keys.", av.getKey().toBuffer(), e.getKey());
                actualValues.put(e.getKey(), e.getValue());
            } catch (SerializationException ex) {
                // Wrap so the failure propagates out of the forEachRemaining consumer and fails the join() below.
                throw new CompletionException(ex);
            }
        }
    }, executorService()).join();
    AssertExtensions.assertMapEquals("Unexpected entries in the segment after compaction.", expectedValues, actualValues);
}
Also used : lombok.val(lombok.val) IntStream(java.util.stream.IntStream) TableAttributes(io.pravega.segmentstore.contracts.tables.TableAttributes) Getter(lombok.Getter) AssertExtensions(io.pravega.test.common.AssertExtensions) Cleanup(lombok.Cleanup) HashMap(java.util.HashMap) Random(java.util.Random) ArrayList(java.util.ArrayList) AttributeUpdate(io.pravega.segmentstore.contracts.AttributeUpdate) BufferView(io.pravega.common.util.BufferView) SerializationException(io.pravega.common.io.SerializationException) Attributes(io.pravega.segmentstore.contracts.Attributes) ImmutableMap(com.google.common.collect.ImmutableMap) TimeoutTimer(io.pravega.common.TimeoutTimer) AttributeId(io.pravega.segmentstore.contracts.AttributeId) lombok.val(lombok.val) CompletionException(java.util.concurrent.CompletionException) Test(org.junit.Test) Collectors(java.util.stream.Collectors) AttributeUpdateCollection(io.pravega.segmentstore.contracts.AttributeUpdateCollection) ByteArraySegment(io.pravega.common.util.ByteArraySegment) AttributeUpdateType(io.pravega.segmentstore.contracts.AttributeUpdateType) Assert(org.junit.Assert) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) Collections(java.util.Collections) ByteArraySegment(io.pravega.common.util.ByteArraySegment) SerializationException(io.pravega.common.io.SerializationException) Random(java.util.Random) HashMap(java.util.HashMap) CompletionException(java.util.concurrent.CompletionException) ArrayList(java.util.ArrayList) Cleanup(lombok.Cleanup) TimeoutTimer(io.pravega.common.TimeoutTimer) Test(org.junit.Test)

Aggregations

SerializationException (io.pravega.common.io.SerializationException)13 Test (org.junit.Test)9 lombok.val (lombok.val)8 Cleanup (lombok.Cleanup)7 TimeoutTimer (io.pravega.common.TimeoutTimer)4 ByteArraySegment (io.pravega.common.util.ByteArraySegment)4 ReadResultMock (io.pravega.segmentstore.server.ReadResultMock)3 AssertExtensions (io.pravega.test.common.AssertExtensions)3 ArrayList (java.util.ArrayList)3 Assert (org.junit.Assert)3 Exceptions (io.pravega.common.Exceptions)2 BufferView (io.pravega.common.util.BufferView)2 DurableDataLog (io.pravega.segmentstore.storage.DurableDataLog)2 ThreadPooledTestSuite (io.pravega.test.common.ThreadPooledTestSuite)2 ByteArrayInputStream (java.io.ByteArrayInputStream)2 ByteArrayOutputStream (java.io.ByteArrayOutputStream)2 EOFException (java.io.EOFException)2 IOException (java.io.IOException)2 Duration (java.time.Duration)2 List (java.util.List)2