
Example 81 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.
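
The Timeout rule these examples are indexed under does not appear in the method bodies below; in test suites like these it is typically declared as a class-level @Rule field. A minimal sketch of such a declaration (the class name, field name, and 30-second limit are illustrative assumptions, not taken from the Pravega test classes):

import java.util.concurrent.TimeUnit;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class ExampleTimeoutTest {

    // Fails any single test method in this class that runs longer than 30 seconds.
    @Rule
    public Timeout globalTimeout = new Timeout(30, TimeUnit.SECONDS);

    @Test
    public void testSomething() {
        // Test body; the rule applies to every @Test method in the class.
    }
}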

In class DataFrameBuilderTests, method testAppendWithCommitFailure.

/**
 * Tests the case when the DataLog fails to commit random frames.
 * Commit errors should affect only the LogItems that were part of the failed commit: their data should be
 * dropped and the affected appends should fail.
 * This should be done both with large and with small LogItems. Large items span multiple frames.
 */
@Test
public void testAppendWithCommitFailure() throws Exception {
    // Fail the commit to DurableDataLog after this many writes.
    int failAt = 7;
    List<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    @Cleanup TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService());
    dataLog.initialize(TIMEOUT);
    val asyncInjector = new ErrorInjector<Exception>(count -> count >= failAt, IntentionalException::new);
    dataLog.setAppendErrorInjectors(null, asyncInjector);
    AtomicInteger failCount = new AtomicInteger();
    List<DataFrameBuilder.CommitArgs> successCommits = Collections.synchronizedList(new ArrayList<>());
    // Keep a reference to the builder (once created) so we can inspect its failure cause.
    val builderRef = new AtomicReference<DataFrameBuilder<TestLogItem>>();
    val attemptCount = new AtomicInteger();
    BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> {
        attemptCount.decrementAndGet();
        // Check that we actually did want an exception to happen.
        Throwable expectedError = Exceptions.unwrap(asyncInjector.getLastCycleException());
        Assert.assertNotNull("An error happened but none was expected: " + ex, expectedError);
        Throwable actualError = Exceptions.unwrap(ex);
        if (!(ex instanceof ObjectClosedException)) {
            // First failure: verify it matches the injected exception (ObjectClosedException may follow once the builder shuts down).
            Assert.assertEquals("Unexpected error occurred upon commit.", expectedError, actualError);
        }
        if (builderRef.get().failureCause() != null) {
            checkFailureCause(builderRef.get(), ce -> ce instanceof IntentionalException);
        }
        failCount.incrementAndGet();
    };
    val args = new DataFrameBuilder.Args(ca -> attemptCount.incrementAndGet(), successCommits::add, errorCallback, executorService());
    try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
        builderRef.set(b);
        try {
            for (val r : records) {
                b.append(r);
            }
            b.close();
        } catch (ObjectClosedException ex) {
            TestUtils.await(() -> b.failureCause() != null, 20, TIMEOUT.toMillis());
            // If DataFrameBuilder is closed, then we must have had an exception thrown via the callback before.
            Assert.assertNotNull("DataFrameBuilder is closed, yet failure cause is not set yet.", b.failureCause());
            checkFailureCause(b, ce -> ce instanceof IntentionalException);
        }
    }
    TestUtils.await(() -> successCommits.size() >= attemptCount.get(), 20, TIMEOUT.toMillis());
    // Read all committed items.
    @Cleanup val reader = new DataFrameReader<>(dataLog, new TestSerializer(), CONTAINER_ID);
    val readItems = new ArrayList<TestLogItem>();
    DataFrameRecord<TestLogItem> readItem;
    while ((readItem = reader.getNext()) != null) {
        readItems.add(readItem.getItem());
    }
    val lastCommitSeqNo = successCommits.stream().mapToLong(DataFrameBuilder.CommitArgs::getLastFullySerializedSequenceNumber).max().orElse(-1);
    val expectedItems = records.stream().filter(r -> r.getSequenceNumber() <= lastCommitSeqNo).collect(Collectors.toList());
    AssertExtensions.assertListEquals("Items read back do not match expected values.", expectedItems, readItems, TestLogItem::equals);
    // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
    val frames = dataLog.getAllEntries(ri -> DataFrame.read(ri.getPayload(), ri.getLength(), ri.getAddress()));
    // Check the correctness of the commit callback.
    AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, frames.size());
    Assert.assertEquals("Unexpected number of frames generated.", successCommits.size(), frames.size());
}
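
The test above (and the ones that follow) uses TestUtils.await(...) to poll until an asynchronous condition, such as all commit callbacks having fired, becomes true within a timeout. A minimal, self-contained sketch of that poll-until-true idiom, using a hypothetical awaitCondition helper rather than Pravega's actual TestUtils API:

import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;

// Hypothetical helper illustrating the poll-until-true idiom used via TestUtils.await above.
public final class AwaitHelper {
    public static void awaitCondition(BooleanSupplier condition, long pollMillis, long timeoutMillis)
            throws InterruptedException, TimeoutException {
        long deadlineNanos = System.nanoTime() + timeoutMillis * 1_000_000L;
        while (!condition.getAsBoolean()) {
            if (System.nanoTime() - deadlineNanos >= 0) {
                throw new TimeoutException("Condition not met within " + timeoutMillis + " ms.");
            }
            // Sleep briefly between checks instead of busy-waiting.
            Thread.sleep(pollMillis);
        }
    }
}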

Example 82 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

In class DataFrameBuilderTests, method testAppendWithSerializationFailure.

/**
 * Tests the case when the appends fail because of Serialization failures.
 * Serialization errors should affect only the append that caused them; they should not cause any other data to
 * be dropped or put the DataFrameBuilder in a stuck state.
 * This should be done both with large and with small LogItems. Large items span multiple frames.
 */
@Test
public void testAppendWithSerializationFailure() throws Exception {
    // Fail every X records.
    int failEvery = 7;
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(RECORD_COUNT / 2, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    // Have every 'failEvery'-th record (starting with the first) fail after writing 90% of itself.
    for (int i = 0; i < records.size(); i += failEvery) {
        records.get(i).failSerializationAfterComplete(0.9, new IOException("intentional " + i));
    }
    HashSet<Integer> failedIndices = new HashSet<>();
    val order = new HashMap<DataFrameBuilder.CommitArgs, Integer>();
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(DataFrameTestHelpers.appendOrder(order), commitFrames::add, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (int i = 0; i < records.size(); i++) {
                try {
                    b.append(records.get(i));
                } catch (IOException ex) {
                    failedIndices.add(i);
                }
            }
        }
        // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
        TestUtils.await(() -> commitFrames.size() >= order.size(), 20, TIMEOUT.toMillis());
        List<DataFrame.DataFrameEntryIterator> frames = dataLog.getAllEntries(readItem -> DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
        Assert.assertEquals("Unexpected number of frames generated.", commitFrames.size(), frames.size());
        // Check the correctness of the commit callback.
        AssertExtensions.assertGreaterThan("Not enough Data Frames were generated.", 1, commitFrames.size());
        AssertExtensions.assertGreaterThan("Not enough LogItems were failed.", records.size() / failEvery, failedIndices.size());
        DataFrameTestHelpers.checkReadRecords(frames, records, failedIndices, r -> new ByteArraySegment(r.getFullSerialization()));
    }
}

Example 83 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

In class DataFrameBuilderTests, method testFlush.

/**
 * Tests the flush() method.
 */
@Test
public void testFlush() throws Exception {
    // Append two records, make sure they are not flushed, then invoke flush() and verify they are committed.
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(2, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
        List<DataFrameBuilder.CommitArgs> commitFrames = Collections.synchronizedList(new ArrayList<>());
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, commitFrames::add, errorCallback, executorService());
        @Cleanup DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args);
        for (TestLogItem item : records) {
            b.append(item);
        }
        // Check the correctness of the commit callback.
        Assert.assertEquals("A Data Frame was generated but none was expected yet.", 0, commitFrames.size());
        // Invoke flush.
        b.flush();
        // Wait for all the frames commit callbacks to be invoked.
        TestUtils.await(() -> commitFrames.size() >= 1, 20, TIMEOUT.toMillis());
        // Check the correctness of the commit callback (after flushing).
        Assert.assertEquals("Exactly one Data Frame was expected so far.", 1, commitFrames.size());
        // Read all entries in the Log and interpret them as DataFrames, then verify the records can be reconstructed.
        val frames = dataLog.getAllEntries(readItem -> DataFrame.read(readItem.getPayload(), readItem.getLength(), readItem.getAddress()));
        Assert.assertEquals("Unexpected number of frames generated.", commitFrames.size(), frames.size());
        DataFrameTestHelpers.checkReadRecords(frames, records, r -> new ByteArraySegment(r.getFullSerialization()));
    }
}

Example 84 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

In class ContainerReadIndexTests, method testCacheFullDeadlock.

/**
 * Tests a deadlock-prone scenario involving multiple Storage read requests from multiple segments, all hitting a
 * CacheFullException while trying to process.
 *
 * Steps:
 * 1. Segment 1: Storage Read Complete -> Ack -> Insert in Index -> Acquire (ReadIndex1.Lock[Thread1]) -> Insert in Cache [Request1]
 * 2. Segment 2: Storage Read Complete -> Ack -> Insert in Index -> Acquire (ReadIndex2.Lock[Thread2]) -> Insert in Cache [Request2]
 * 3. Cache is full. Deadlock occurs if:
 * 3.1. [Request1] invokes Cache Eviction, which wants to acquire ReadIndex2.Lock, but it is owned by Thread2.
 * 3.2. [Request2] invokes Cache Eviction, which wants to acquire ReadIndex1.Lock, but it is owned by Thread1.
 *
 * This test verifies that no deadlock occurs by simulating this exact scenario. It verifies that all requests eventually
 * complete successfully (as the deadlock victim will back off and retry).
 */
@Test
public void testCacheFullDeadlock() throws Exception {
    // This is the actual cache size, even if we set a lower value than this.
    val maxCacheSize = 2 * 1024 * 1024;
    // Fill up most of the cache - this is also a candidate for eviction.
    val append1Size = (int) (0.75 * maxCacheSize);
    // Dummy append - need to register the read index as a cache client.
    val append2Size = 1;
    val segmentSize = maxCacheSize + 1;
    val config = ReadIndexConfig.builder()
            .with(ReadIndexConfig.MEMORY_READ_MIN_LENGTH, 0) // Default: Off (we have a special test for this).
            .with(ReadIndexConfig.STORAGE_READ_ALIGNMENT, maxCacheSize)
            .build();
    CachePolicy cachePolicy = new CachePolicy(maxCacheSize, Duration.ZERO, Duration.ofMillis(1));
    @Cleanup TestContext context = new TestContext(config, cachePolicy, maxCacheSize);
    // Block the first insert (this will be from Segment 1).
    val append1Address = new AtomicInteger(0);
    context.cacheStorage.insertCallback = a -> append1Address.compareAndSet(0, a);
    val segment1Delete = new ReusableLatch();
    context.cacheStorage.beforeDelete = deleteAddress -> {
        if (deleteAddress == append1Address.get()) {
            // Block eviction of the first segment 1 data (just the first; we want the rest to go through).
            Exceptions.handleInterrupted(segment1Delete::await);
        }
    };
    // Create segments and make each of them slightly bigger than the cache capacity.
    long segment1Id = createSegment(0, context);
    long segment2Id = createSegment(1, context);
    val segment1Metadata = context.metadata.getStreamSegmentMetadata(segment1Id);
    val segment2Metadata = context.metadata.getStreamSegmentMetadata(segment2Id);
    segment1Metadata.setLength(segmentSize);
    segment1Metadata.setStorageLength(segmentSize);
    segment2Metadata.setLength(segmentSize);
    segment2Metadata.setStorageLength(segmentSize);
    createSegmentsInStorage(context);
    context.storage.openWrite(segment1Metadata.getName()).thenCompose(handle -> context.storage.write(handle, 0, new ByteArrayInputStream(new byte[segmentSize]), segmentSize, TIMEOUT)).join();
    context.storage.openWrite(segment2Metadata.getName()).thenCompose(handle -> context.storage.write(handle, 0, new ByteArrayInputStream(new byte[segmentSize]), segmentSize, TIMEOUT)).join();
    // Write some data into the cache. This will become a candidate for eviction at the next step.
    context.readIndex.append(segment1Id, 0, new ByteArraySegment(new byte[append1Size]));
    // Write some data into Segment 2's index. This will have no effect on the cache, but we will register it with the Cache Manager.
    context.readIndex.append(segment2Id, 0, new ByteArraySegment(new byte[append2Size]));
    // Initiate the first Storage read. This should exceed the max cache size, so it should trigger the cleanup.
    val segment1Read = context.readIndex.read(segment1Id, append1Size, segmentSize - append1Size, TIMEOUT).next();
    Assert.assertEquals(ReadResultEntryType.Storage, segment1Read.getType());
    segment1Read.requestContent(TIMEOUT);
    // This one should complete right away.
    segment1Read.getContent().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // Wait for the delete callback to be latched.
    TestUtils.await(() -> segment1Delete.getQueueLength() > 0, 10, TIMEOUT.toMillis());
    // Initiate the second Storage read. This should also exceed the max cache size and trigger another cleanup, but
    // (most importantly) on a different thread.
    val segment2Read = context.readIndex.read(segment2Id, append2Size, segmentSize - append2Size, TIMEOUT).next();
    Assert.assertEquals(ReadResultEntryType.Storage, segment2Read.getType());
    segment2Read.requestContent(TIMEOUT);
    // As with the first one, this should complete right away.
    segment2Read.getContent().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // We use yet another thread to validate that no deadlock occurs. This should briefly block on Segment 2's Read index's
    // lock, but it should be unblocked when we release that (next step).
    val append2Future = CompletableFuture.runAsync(() -> {
        try {
            context.readIndex.append(segment2Id, append2Size, new ByteArraySegment(new byte[append1Size]));
        } catch (Exception ex) {
            throw new CompletionException(ex);
        }
    }, executorService());
    // Release the delete blocker. If all goes well, all the other operations should be unblocked at this point.
    segment1Delete.release();
    append2Future.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
}
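
The Javadoc above notes that the deadlock victim is expected to "back off and retry". As a general illustration of that idea (a sketch of the back-off-and-retry pattern, not Pravega's cache-eviction code): a thread that holds one lock and cannot immediately acquire the second releases what it holds, pauses briefly, and retries, which breaks the circular wait.

import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.locks.ReentrantLock;

// Illustrative back-off-and-retry pattern: never block on the second lock while holding the first.
public final class BackOffAndRetry {
    public static void runWithBothLocks(ReentrantLock first, ReentrantLock second, Runnable action)
            throws InterruptedException {
        while (true) {
            first.lock();
            try {
                if (second.tryLock()) {
                    try {
                        action.run();
                        return;
                    } finally {
                        second.unlock();
                    }
                }
            } finally {
                first.unlock();
            }
            // Could not get the second lock: back off for a short, randomized interval and retry.
            Thread.sleep(ThreadLocalRandom.current().nextInt(1, 10));
        }
    }
}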

Example 85 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

In class ContainerReadIndexTests, method testConcurrentEvictionTransactionStorageMerge.

/**
 * Tests a scenario where a call to {@link StreamSegmentReadIndex#completeMerge} executes concurrently with a
 * CacheManager eviction. The Cache Manager must not evict the data for recently transferred entries, even if they
 * would otherwise be eligible for eviction in the source segment.
 */
@Test
public void testConcurrentEvictionTransactionStorageMerge() throws Exception {
    val mergeOffset = 1;
    val appendLength = 1;
    CachePolicy cachePolicy = new CachePolicy(1, Duration.ZERO, Duration.ofMillis(1));
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG, cachePolicy);
    // Create parent segment and one transaction
    long targetId = createSegment(0, context);
    long sourceId = createTransaction(1, context);
    val targetMetadata = context.metadata.getStreamSegmentMetadata(targetId);
    val sourceMetadata = context.metadata.getStreamSegmentMetadata(sourceId);
    createSegmentsInStorage(context);
    // Write something to the parent segment.
    appendSingleWrite(targetId, new ByteArraySegment(new byte[mergeOffset]), context);
    context.storage.openWrite(targetMetadata.getName()).thenCompose(handle -> context.storage.write(handle, 0, new ByteArrayInputStream(new byte[mergeOffset]), mergeOffset, TIMEOUT)).join();
    // Write something to the transaction, but do not write anything in Storage - we want to verify we don't even
    // try to reach in there.
    val sourceContents = getAppendData(context.metadata.getStreamSegmentMetadata(sourceId).getName(), sourceId, 0, 0);
    appendSingleWrite(sourceId, sourceContents, context);
    sourceMetadata.setStorageLength(sourceMetadata.getLength());
    // Seal & Begin-merge the transaction (do not seal in storage).
    sourceMetadata.markSealed();
    targetMetadata.setLength(sourceMetadata.getLength() + mergeOffset);
    context.readIndex.beginMerge(targetId, mergeOffset, sourceId);
    sourceMetadata.markMerged();
    sourceMetadata.markDeleted();
    // Trigger a Complete Merge. We want to intercept and pause it immediately before it is unregistered from the
    // Cache Manager.
    @Cleanup("release") val unregisterCalled = new ReusableLatch();
    @Cleanup("release") val unregisterBlocker = new ReusableLatch();
    context.cacheManager.setUnregisterInterceptor(c -> {
        unregisterCalled.release();
        Exceptions.handleInterrupted(unregisterBlocker::await);
    });
    val completeMerge = CompletableFuture.runAsync(() -> {
        try {
            context.readIndex.completeMerge(targetId, sourceId);
        } catch (Exception ex) {
            throw new CompletionException(ex);
        }
    }, executorService());
    // Clear the cache. The source Read index is still registered in the Cache Manager - we want to ensure that any
    // eviction happening at this point will not delete anything from the Cache that we don't want deleted.
    unregisterCalled.await();
    context.cacheManager.applyCachePolicy();
    // Wait for the operation to complete.
    unregisterBlocker.release();
    completeMerge.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // Verify that we can append (appending will modify the last cache entry; if that entry had been evicted or
    // erroneously deleted, this would no longer work).
    val appendOffset = (int) targetMetadata.getLength();
    val appendData = new byte[appendLength];
    appendData[0] = (byte) 23;
    targetMetadata.setLength(appendOffset + appendLength);
    context.readIndex.append(targetId, appendOffset, new ByteArraySegment(appendData));
    // Issue a read and verify we can read everything that we wrote. If it had been evicted or erroneously deleted
    // from the cache this would result in an error.
    byte[] expectedData = new byte[appendOffset + appendLength];
    sourceContents.copyTo(expectedData, mergeOffset, sourceContents.getLength());
    System.arraycopy(appendData, 0, expectedData, appendOffset, appendLength);
    ReadResult rr = context.readIndex.read(targetId, 0, expectedData.length, TIMEOUT);
    Assert.assertTrue("Parent Segment read indicates no data available.", rr.hasNext());
    byte[] actualData = new byte[expectedData.length];
    rr.readRemaining(actualData, TIMEOUT);
    Assert.assertArrayEquals("Unexpected data read back.", expectedData, actualData);
}

Aggregations

Timeout (org.junit.rules.Timeout): 92
Rule (org.junit.Rule): 91
Test (org.junit.Test): 91
Assert (org.junit.Assert): 84
AssertExtensions (io.pravega.test.common.AssertExtensions): 81
Duration (java.time.Duration): 81
ArrayList (java.util.ArrayList): 80
lombok.val (lombok.val): 79
TimeUnit (java.util.concurrent.TimeUnit): 78
ThreadPooledTestSuite (io.pravega.test.common.ThreadPooledTestSuite): 77
Cleanup (lombok.Cleanup): 73
Collections (java.util.Collections): 72
CompletableFuture (java.util.concurrent.CompletableFuture): 72
HashMap (java.util.HashMap): 69
ByteArraySegment (io.pravega.common.util.ByteArraySegment): 68
Collectors (java.util.stream.Collectors): 68
List (java.util.List): 66
AtomicReference (java.util.concurrent.atomic.AtomicReference): 66
IntentionalException (io.pravega.test.common.IntentionalException): 62
Map (java.util.Map): 62