Example 66 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

From class SegmentAggregatorTests, method testRecoveryEmptyMergeOperation.
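The snippets on this page rely on a class-level Timeout rule only indirectly (it bounds each test method's run time), and none of them shows the rule declaration itself. For reference, a JUnit 4 Timeout rule is typically wired up as in the minimal sketch below; the class name, field name and the 30-second limit are illustrative and not taken from the Pravega sources.

import java.time.Duration;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class TimeoutRuleExample {

    // Fails any individual test method in this class that runs longer than 30 seconds.
    @Rule
    public Timeout globalTimeout = Timeout.seconds(30);

    // A per-call bound used with CompletableFuture.get(...)/join(), as in the tests below.
    private static final Duration TIMEOUT = Duration.ofSeconds(10);

    @Test
    public void completesWellWithinTheLimit() {
        // A real test would exercise asynchronous Storage or SegmentAggregator calls here.
    }
}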

/**
 * Tests a scenario where a MergeSegmentOperation needs to be recovered even though its source segment has already been merged (and cleaned up) in Storage.
 */
@Test
public void testRecoveryEmptyMergeOperation() throws Exception {
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG);
    // Create a parent segment and one transaction segment.
    context.segmentAggregator.initialize(TIMEOUT).join();
    // Part 1: When the source segment is missing from Storage, but metadata does not reflect that.
    SegmentAggregator ta0 = context.transactionAggregators[0];
    context.storage.create(ta0.getMetadata().getName(), TIMEOUT).join();
    context.storage.openWrite(ta0.getMetadata().getName()).thenCompose(txnHandle -> context.storage.seal(txnHandle, TIMEOUT)).join();
    val txn0Metadata = context.containerMetadata.getStreamSegmentMetadata(ta0.getMetadata().getId());
    txn0Metadata.markSealed();
    txn0Metadata.markSealedInStorage();
    ta0.initialize(TIMEOUT).join();
    context.storage.delete(context.storage.openWrite(txn0Metadata.getName()).join(), TIMEOUT).join();
    // This is the operation that should be reconciled.
    context.segmentAggregator.add(generateMergeTransactionAndUpdateMetadata(ta0.getMetadata().getId(), context));
    // Verify the operation was ack-ed.
    AtomicBoolean mergeAcked = new AtomicBoolean();
    context.dataSource.setCompleteMergeCallback((target, source) -> mergeAcked.set(true));
    context.segmentAggregator.flush(TIMEOUT).join();
    Assert.assertTrue("Merge was not ack-ed for deleted source segment.", mergeAcked.get());
    // Part 2: When the source segment's metadata indicates it was deleted.
    SegmentAggregator ta1 = context.transactionAggregators[1];
    context.storage.create(ta1.getMetadata().getName(), TIMEOUT).join();
    context.storage.openWrite(ta1.getMetadata().getName()).thenCompose(txnHandle -> context.storage.seal(txnHandle, TIMEOUT)).join();
    val txn1Metadata = context.containerMetadata.getStreamSegmentMetadata(ta1.getMetadata().getId());
    txn1Metadata.markDeleted();
    // This is the operation that should be reconciled.
    context.segmentAggregator.add(generateMergeTransactionAndUpdateMetadata(ta1.getMetadata().getId(), context));
    // Verify the operation was ack-ed.
    mergeAcked.set(false);
    context.dataSource.setCompleteMergeCallback((target, source) -> mergeAcked.set(true));
    context.segmentAggregator.flush(TIMEOUT).join();
    // Finally, verify that all operations were ack-ed back.
    Assert.assertTrue("Merge was not ack-ed for deleted source segment.", mergeAcked.get());
}
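The rule this test pins down can be summarized with the hypothetical sketch below. It is not Pravega's SegmentAggregator implementation; the parameter names and functional interfaces are illustrative, and java.util.concurrent.CompletableFuture plus java.util.function.Function/Supplier are assumed to be imported. The idea: a merge whose source segment is missing from Storage, or whose metadata says it was deleted, is treated as already applied and is acknowledged instead of failing the flush.

// Hypothetical sketch only -- not Pravega's SegmentAggregator code.
static CompletableFuture<Void> flushMerge(
        String sourceSegmentName,
        boolean sourceDeletedInMetadata,                          // Part 2: markDeleted() was called.
        Function<String, CompletableFuture<Boolean>> storageExists,
        Runnable ackMerge,                                        // corresponds to setCompleteMergeCallback
        Supplier<CompletableFuture<Void>> performMergeInStorage) {
    if (sourceDeletedInMetadata) {
        ackMerge.run();                                           // nothing left to do in Storage
        return CompletableFuture.completedFuture(null);
    }
    return storageExists.apply(sourceSegmentName).thenCompose(exists -> {
        if (!exists) {
            ackMerge.run();                                       // Part 1: source already gone from Storage
            return CompletableFuture.<Void>completedFuture(null);
        }
        return performMergeInStorage.get();                       // normal path: merge in Storage
    });
}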

Example 67 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

From class SegmentAggregatorTests, method testEmptySegment.

/**
 * Tests the ability to process and flush various Operations on empty (not yet created) Segments:
 * - Append
 * - Seal
 * - Truncate
 * - Merge (empty source or targets)
 */
@Test
public void testEmptySegment() throws Exception {
    final int appendLength = DEFAULT_CONFIG.getFlushThresholdBytes();
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG);
    val append = context.transactionAggregators[0];
    val seal = context.transactionAggregators[1];
    val truncate = context.transactionAggregators[2];
    // Empty (not yet created) SOURCE for a merge into a created, zero-length target.
    val mergeEmptySource = context.transactionAggregators[3];
    // Created, zero-length TARGET for the merge from the empty source above.
    val mergeEmptySourceTarget = context.transactionAggregators[4];
    // Empty/non-empty source to empty TARGET.
    val mergeEmptyTarget = context.transactionAggregators[5];
    // Empty SOURCE to empty target.
    val mergeEmptyTargetEmptySource = context.transactionAggregators[6];
    // Non-empty SOURCE to empty target.
    val mergeEmptyTargetNonEmptySource = context.transactionAggregators[7];
    val allAggregators = new SegmentAggregator[] { append, seal, truncate, mergeEmptySource, mergeEmptySourceTarget, mergeEmptyTarget, mergeEmptyTargetEmptySource, mergeEmptyTargetNonEmptySource };
    val finalAggregators = new SegmentAggregator[] { append, seal, truncate, mergeEmptySourceTarget, mergeEmptyTarget };
    // Create a zero-length segment that will be used as the target of a merge with a zero-length, not-yet-created segment.
    context.storage.create(mergeEmptySourceTarget.getMetadata().getName(), TIMEOUT).join();
    // Create a non-zero-length segment that will be merged into an empty target.
    val nonEmptySourceMetadata = (UpdateableSegmentMetadata) mergeEmptyTargetNonEmptySource.getMetadata();
    nonEmptySourceMetadata.setLength(1L);
    nonEmptySourceMetadata.setStorageLength(1L);
    nonEmptySourceMetadata.markSealed();
    context.storage.create(nonEmptySourceMetadata.getName(), TIMEOUT)
            .thenCompose(v -> context.storage.openWrite(mergeEmptyTargetNonEmptySource.getMetadata().getName()))
            .thenCompose(handle -> context.storage.write(handle, 0L, new ByteArrayInputStream(new byte[1]), 1, TIMEOUT)
                    .thenCompose(v -> context.storage.seal(handle, TIMEOUT)))
            .join();
    // Initialize all the aggregators now, before adding operations for processing.
    for (val a : allAggregators) {
        a.initialize(TIMEOUT).join();
    }
    // Append on empty segment.
    append.add(generateAppendAndUpdateMetadata(append.getMetadata().getId(), new byte[appendLength], context));
    append.flush(TIMEOUT).join();
    Assert.assertEquals("Unexpected segment length after first write.", appendLength, context.storage.getStreamSegmentInfo(append.getMetadata().getName(), TIMEOUT).join().getLength());
    // Seal on empty segment.
    seal.add(generateSealAndUpdateMetadata(seal.getMetadata().getId(), context));
    seal.flush(TIMEOUT).join();
    Assert.assertTrue("Unexpected Metadata.isSealedInStorage after seal.", seal.getMetadata().isSealedInStorage());
    Assert.assertFalse("Not expecting segment to have been created in storage after seal.", context.storage.exists(seal.getMetadata().getName(), TIMEOUT).join());
    // Truncate on empty segment (a no-op).
    truncate.add(generateTruncateAndUpdateMetadata(truncate.getMetadata().getId(), context));
    truncate.flush(TIMEOUT).join();
    Assert.assertFalse("Not expecting segment to have been created in storage after truncate.", context.storage.exists(truncate.getMetadata().getName(), TIMEOUT).join());
    // Merge a zero-length, not-yet-created segment into a zero-length, created segment.
    mergeEmptySourceTarget.add(generateMergeTransactionAndUpdateMetadata(mergeEmptySourceTarget.getMetadata().getId(), mergeEmptySource.getMetadata().getId(), context));
    mergeEmptySourceTarget.flush(TIMEOUT).join();
    Assert.assertFalse("Merge source was created for initially empty segment.", context.storage.exists(mergeEmptySource.getMetadata().getName(), TIMEOUT).join());
    Assert.assertEquals("Unexpected length of pre-existing target segment after merge with empty segment.", 0, context.storage.getStreamSegmentInfo(mergeEmptySourceTarget.getMetadata().getName(), TIMEOUT).join().getLength());
    Assert.assertTrue("Unexpected Metadata.IsDeletedInStorage for empty source", mergeEmptySource.getMetadata().isDeletedInStorage());
    // Merge an empty source into an empty target.
    mergeEmptyTarget.add(generateMergeTransactionAndUpdateMetadata(mergeEmptyTarget.getMetadata().getId(), mergeEmptyTargetEmptySource.getMetadata().getId(), context));
    mergeEmptyTarget.flush(TIMEOUT).join();
    Assert.assertFalse("Merge source was created for initially empty segment.", context.storage.exists(mergeEmptyTargetEmptySource.getMetadata().getName(), TIMEOUT).join());
    Assert.assertFalse("Merge target was created for initially empty segment.", context.storage.exists(mergeEmptyTarget.getMetadata().getName(), TIMEOUT).join());
    Assert.assertTrue("Unexpected Metadata.IsDeletedInStorage for empty source", mergeEmptyTargetEmptySource.getMetadata().isDeletedInStorage());
    // Merge a non-empty source segment into an empty target.
    mergeEmptyTarget.add(generateMergeTransactionAndUpdateMetadata(mergeEmptyTarget.getMetadata().getId(), mergeEmptyTargetNonEmptySource.getMetadata().getId(), context));
    mergeEmptyTarget.flush(TIMEOUT).join();
    Assert.assertFalse("Merge source still exists for initially non-empty segment.", context.storage.exists(mergeEmptyTargetNonEmptySource.getMetadata().getName(), TIMEOUT).join());
    Assert.assertEquals("Unexpected length of target segment after merge with non-empty segment.", 1, context.storage.getStreamSegmentInfo(mergeEmptyTarget.getMetadata().getName(), TIMEOUT).join().getLength());
    Assert.assertTrue("Unexpected Metadata.IsDeletedInStorage for empty source", mergeEmptyTargetNonEmptySource.getMetadata().isDeletedInStorage());
    // Finally, check that everything was marked as flushed out of the aggregators.
    for (val a : finalAggregators) {
        Assert.assertFalse("Unexpected mustFlush() after flush", a.mustFlush());
        AssertExtensions.assertLessThan("Unexpected LUSN after flush.", 0, a.getLowestUncommittedSequenceNumber());
    }
}
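One behavior verified above is that flushing a Seal or Truncate on a segment that never received data does not create the segment in Storage. Below is a minimal, hypothetical sketch of that "create lazily, only when there are bytes to flush" idea; it is not the real aggregator code, and BiFunction/Function from java.util.function plus CompletableFuture are assumed to be imported.

// Hypothetical sketch: the Storage segment is created lazily, on the first flush that carries data.
// Metadata-only operations (Seal/Truncate on an empty, never-written segment) leave Storage untouched,
// which is what the assertFalse(context.storage.exists(...)) checks above rely on.
static CompletableFuture<Void> flushPending(
        String segmentName,
        byte[] pendingData,
        Function<String, CompletableFuture<Void>> createInStorage,
        BiFunction<String, byte[], CompletableFuture<Void>> writeToStorage) {
    if (pendingData.length == 0) {
        return CompletableFuture.completedFuture(null);           // nothing to write; Storage never sees the segment
    }
    return createInStorage.apply(segmentName)
            .thenCompose(v -> writeToStorage.apply(segmentName, pendingData));
}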

Example 68 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

From class SegmentAggregatorTests, method testReconcileDelete.

/**
 * Tests the ability to reconcile a {@link DeleteSegmentOperation} on Segments in various states:
 * - Empty (not yet created).
 * - Empty (created, but no data).
 * - Not empty, not sealed.
 * - Sealed (empty or not).
 *
 * Reconciling a {@link DeleteSegmentOperation} is different from any other operation: even if there are other
 * operations to reconcile, the mere presence of a Delete bypasses all of them and simply deletes the segment.
 */
@Test
public void testReconcileDelete() throws Exception {
    final int appendLength = DEFAULT_CONFIG.getFlushThresholdBytes();
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG);
    val notExistsWithAppend = context.transactionAggregators[0];
    val existsWithAppend = context.transactionAggregators[1];
    val existsWithSeal = context.transactionAggregators[2];
    val allAggregators = new SegmentAggregator[] { notExistsWithAppend, existsWithAppend, existsWithSeal };
    for (val a : allAggregators) {
        // Create the segment, and add 1 byte to it. This will cause initialize() to not treat it as empty.
        context.storage.create(a.getMetadata().getName(), TIMEOUT).thenCompose(v -> context.storage.openWrite(a.getMetadata().getName())).thenCompose(handle -> {
            ((UpdateableSegmentMetadata) a.getMetadata()).setLength(1L);
            ((UpdateableSegmentMetadata) a.getMetadata()).setStorageLength(1L);
            return context.storage.write(handle, 0, new ByteArrayInputStream(new byte[] { 1 }), 1, TIMEOUT);
        }).thenCompose(v -> a.initialize(TIMEOUT)).join();
        // Add enough data to trigger a flush.
        a.add(generateAppendAndUpdateMetadata(a.getMetadata().getId(), new byte[appendLength], context));
        if (a == existsWithSeal) {
            // Add a Seal for the segment that is expected to be sealed.
            a.add(generateSealAndUpdateMetadata(existsWithSeal.getMetadata().getId(), context));
        }
        // Delete the Segment from Storage.
        Futures.exceptionallyExpecting(context.storage.openWrite(a.getMetadata().getName()).thenCompose(handle -> context.storage.delete(handle, TIMEOUT)), ex -> ex instanceof StreamSegmentNotExistsException, null).join();
        Assert.assertTrue("Unexpected value from mustFlush() before first flush().", a.mustFlush());
        // First attempt should fail.
        AssertExtensions.assertSuppliedFutureThrows("First invocation of flush() should fail.", () -> a.flush(TIMEOUT), ex -> ex instanceof StreamSegmentNotExistsException);
        Assert.assertTrue("Unexpected value from mustFlush() after failed flush().", a.mustFlush());
        // Add the DeleteSegmentOperation - this should cause reconciliation to succeed.
        a.add(generateDeleteAndUpdateMetadata(a.getMetadata().getId(), context));
        a.flush(TIMEOUT).join();
        Assert.assertFalse("Unexpected value from mustFlush() after Deletion.", a.mustFlush());
        AssertExtensions.assertLessThan("Unexpected LUSN after flush.", 0, a.getLowestUncommittedSequenceNumber());
        Assert.assertTrue("Unexpected value from isDeleted() after Deletion.", a.getMetadata().isDeleted());
        Assert.assertTrue("Unexpected value from isDeletedInStorage() after Deletion.", a.getMetadata().isDeletedInStorage());
        // Verify that no segment exists in Storage after the flush.
        boolean existsInStorage = context.storage.exists(a.getMetadata().getName(), TIMEOUT).join();
        Assert.assertFalse("Segment still exists in Storage after Deletion.", existsInStorage);
    }
}
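As the Javadoc notes, a pending DeleteSegmentOperation short-circuits everything else. The hypothetical sketch below captures that rule; the names and types are illustrative, not the real reconciliation code.

// Hypothetical sketch of the "delete wins" rule exercised by this test.
static CompletableFuture<Void> flush(
        boolean hasPendingDelete,
        Supplier<CompletableFuture<Void>> deleteInStorageIgnoringNotExists,   // tolerant of StreamSegmentNotExistsException
        Supplier<CompletableFuture<Void>> flushAppendsAndSeals) {
    if (hasPendingDelete) {
        // Skip the appends/seals entirely: just ensure the segment is gone and ack everything pending.
        return deleteInStorageIgnoringNotExists.get();
    }
    return flushAppendsAndSeals.get();
}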

Example 69 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

From class TableServiceTests, method check.

private void check(HashMap<BufferView, EntryData> keyInfo, TableStore tableStore) throws Exception {
    val bySegment = keyInfo.entrySet().stream().collect(Collectors.groupingBy(e -> e.getValue().segmentName));
    // Check nonexistent keys.
    val searchFutures = new ArrayList<CompletableFuture<List<TableEntry>>>();
    val iteratorFutures = new ArrayList<CompletableFuture<List<TableEntry>>>();
    // Delta Iteration does not support fixed-key-length TableSegments.
    val unsortedIteratorFutures = new ArrayList<CompletableFuture<List<TableEntry>>>();
    val offsetIteratorFutures = new ArrayList<CompletableFuture<List<IteratorItem<TableEntry>>>>();
    val expectedResult = new ArrayList<Map.Entry<BufferView, EntryData>>();
    for (val e : bySegment.entrySet()) {
        String segmentName = e.getKey();
        boolean fixedKeyLength = isFixedKeyLength(segmentName);
        val info = tableStore.getInfo(segmentName, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        Assert.assertEquals(segmentName, info.getName());
        AssertExtensions.assertGreaterThan("Unexpected length for " + segmentName, 0, info.getLength());
        val expectedKeyLength = fixedKeyLength ? getFixedKeyLength(segmentName) : 0;
        Assert.assertEquals("Unexpected key length for " + segmentName, expectedKeyLength, info.getKeyLength());
        Assert.assertEquals(fixedKeyLength, info.getType().isFixedKeyLengthTableSegment());
        val keys = new ArrayList<BufferView>();
        for (val se : e.getValue()) {
            keys.add(se.getKey());
            expectedResult.add(se);
        }
        searchFutures.add(tableStore.get(segmentName, keys, TIMEOUT));
        CompletableFuture<List<TableEntry>> entryIteratorFuture = tableStore.entryIterator(segmentName, IteratorArgs.builder().fetchTimeout(TIMEOUT).build()).thenCompose(ei -> {
            val result = new ArrayList<TableEntry>();
            return ei.forEachRemaining(i -> result.addAll(i.getEntries()), executorService()).thenApply(v -> {
                if (fixedKeyLength) {
                    checkSortedOrder(result);
                }
                return result;
            });
        });
        iteratorFutures.add(entryIteratorFuture);
        if (!fixedKeyLength) {
            unsortedIteratorFutures.add(entryIteratorFuture);
            // For simplicity, always start from beginning of TableSegment.
            offsetIteratorFutures.add(tableStore.entryDeltaIterator(segmentName, 0L, TIMEOUT).thenCompose(ei -> {
                val result = new ArrayList<IteratorItem<TableEntry>>();
                return ei.forEachRemaining(result::add, executorService()).thenApply(v -> result);
            }));
        }
    }
    // Check search results.
    val actualResults = Futures.allOfWithResults(searchFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).stream().flatMap(List::stream).collect(Collectors.toList());
    Assert.assertEquals("Unexpected number of search results.", expectedResult.size(), actualResults.size());
    for (int i = 0; i < expectedResult.size(); i++) {
        val expectedKey = expectedResult.get(i).getKey();
        val expectedEntry = expectedResult.get(i).getValue();
        val actual = actualResults.get(i);
        if (expectedEntry.isDeleted()) {
            // Deleted keys are returned as nulls. If a value does come back, the extra get() below
            // re-fetches the key purely as a debugging aid; its result (r2) is not asserted on.
            if (actual != null) {
                val r2 = tableStore.get(expectedEntry.segmentName, Collections.singletonList(expectedKey), TIMEOUT).join();
            }
            Assert.assertNull("Not expecting a value for a deleted Key ", actual);
        } else {
            Assert.assertEquals("Unexpected value for non-deleted Key.", expectedEntry.getValue(), actual.getValue());
            Assert.assertEquals("Unexpected key for non-deleted Key.", expectedKey, actual.getKey().getKey());
            Assert.assertEquals("Unexpected TableKey.Version for non-deleted Key.", expectedEntry.getVersion(), actual.getKey().getVersion());
        }
    }
    // Check iterator results. We sort both (the iterator results and actualResults) by Version/Offset to ease the comparison.
    val actualIteratorResults = Futures.allOfWithResults(iteratorFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).stream().flatMap(List::stream).sorted(Comparator.comparingLong(e -> e.getKey().getVersion())).collect(Collectors.toList());
    val expectedIteratorResults = actualResults.stream().filter(Objects::nonNull).sorted(Comparator.comparingLong(e -> e.getKey().getVersion())).collect(Collectors.toList());
    // These lists are used to compare non-delta based iteration with delta based iteration.
    val actualUnsortedIteratorResults = Futures.allOfWithResults(unsortedIteratorFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).stream().flatMap(List::stream).sorted(Comparator.comparingLong(e -> e.getKey().getVersion())).collect(Collectors.toList());
    val expectedUnsortedIteratorResults = actualUnsortedIteratorResults.stream().filter(Objects::nonNull).sorted(Comparator.comparingLong(e -> e.getKey().getVersion())).collect(Collectors.toList());
    val actualOffsetIteratorList = Futures.allOfWithResults(offsetIteratorFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).stream().flatMap(List::stream).collect(Collectors.toList());
    val actualOffsetIteratorResults = processDeltaIteratorItems(actualOffsetIteratorList).stream().sorted(Comparator.comparingLong(e -> e.getKey().getVersion())).collect(Collectors.toList());
    AssertExtensions.assertListEquals("Unexpected result from entryIterator().", expectedIteratorResults, actualIteratorResults, TableEntry::equals);
    for (val entry : expectedUnsortedIteratorResults) {
        Assert.assertTrue("Missing expected TableEntry from deltaEntryIterator()", actualOffsetIteratorResults.contains(entry));
    }
}
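Besides the class-level org.junit.rules.Timeout rule, every asynchronous call in this method is bounded individually with get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS). A hypothetical helper capturing this recurring pattern could look like the sketch below; the Pravega tests inline the pattern instead.

// Hypothetical convenience helper; not part of the Pravega test suite.
private static <T> T await(CompletableFuture<T> future, Duration timeout) throws Exception {
    // A hung call surfaces as a TimeoutException here, with a stack trace pointing at the caller,
    // rather than only tripping the coarser class-level Timeout rule.
    return future.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}

// Example usage (equivalent to the inlined calls above):
// val info = await(tableStore.getInfo(segmentName, TIMEOUT), TIMEOUT);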

Example 70 with Timeout

Use of org.junit.rules.Timeout in project pravega by pravega.

From class StreamSegmentContainerTests, method testTableSegmentReadAfterCompactionAndRecovery.

/**
 * Tests a non-trivial scenario in which ContainerKeyIndex may be tail-caching a stale version of a key if the
 * following conditions occur:
 * 1. StorageWriter processes values v0...vn for k1 and {@link WriterTableProcessor} indexes them.
 * 2. As a result of {@link WriterTableProcessor} activity, the last value vn for k1 is moved to the tail of the Segment.
 * 3. While TableCompactor works, a new PUT operation is appended to the Segment with new value vn+1 for k1.
 * 4. At this point, the StorageWriter stops making progress and the container restarts without processing either
 *    the new value vn+1 or the compacted value vn for k1.
 * 5. A subsequent restart will trigger the tail-caching from the last indexed offset, which points to vn+1.
 * 6. The bug, which consists of the tail-caching process not taking care of table entry versions, would overwrite
 *    vn+1 with vn, just because it has a higher offset as it was written later in the Segment.
 */
@Test
public void testTableSegmentReadAfterCompactionAndRecovery() throws Exception {
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG, NO_TRUNCATIONS_DURABLE_LOG_CONFIG, DEFAULT_WRITER_CONFIG, null);
    val durableLog = new AtomicReference<OperationLog>();
    val durableLogFactory = new WatchableOperationLogFactory(context.operationLogFactory, durableLog::set);
    // Data size and count to be written in this test.
    int serializedEntryLength = 28;
    int writtenEntries = 7;
    @Cleanup StreamSegmentContainer container = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    container.startAsync().awaitRunning();
    Assert.assertNotNull(durableLog.get());
    val tableStore = container.getExtension(ContainerTableExtension.class);
    // 1. Create the Table Segment and get a DirectSegmentAccess to it to monitor its size.
    String tableSegmentName = getSegmentName(0) + "_Table";
    val type = SegmentType.builder(getSegmentType(tableSegmentName)).tableSegment().build();
    tableStore.createSegment(tableSegmentName, type, TIMEOUT).join();
    DirectSegmentAccess directTableSegment = container.forSegment(tableSegmentName, TIMEOUT).join();
    // 2. Add some entries to the table segments. Note that we write multiple values to each key, so the TableCompactor
    // can find entries to move to the tail.
    final BiFunction<String, Integer, TableEntry> createTableEntry = (key, value) -> TableEntry.unversioned(new ByteArraySegment(key.getBytes()), new ByteArraySegment(String.format("Value_%s", value).getBytes()));
    // 3. This callback will run when the StorageWriter writes data to Storage. At this point, StorageWriter would
    // have completed its first iteration, so it is the time to add a new value for key1 while TableCompactor is working.
    val compactedEntry = List.of(TableEntry.versioned(new ByteArraySegment("key1".getBytes(StandardCharsets.UTF_8)), new ByteArraySegment("3".getBytes(StandardCharsets.UTF_8)), serializedEntryLength * 2L));
    // Simulate that Table Compactor moves [k1, 3] to the tail of the Segment as a result of compacting the first 4 entries.
    val compactedEntryUpdate = EntrySerializerTests.generateUpdateWithExplicitVersion(compactedEntry);
    CompletableFuture<Void> callbackExecuted = new CompletableFuture<>();
    context.storageFactory.getPostWriteCallback().set((segmentHandle, offset) -> {
        if (segmentHandle.getSegmentName().contains("Segment_0_Table$attributes.index") && !callbackExecuted.isDone()) {
            // New PUT with the newest value.
            Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key1", 4)), TIMEOUT)).join();
            // Simulates a compacted entry append performed by Table Compactor.
            directTableSegment.append(compactedEntryUpdate, null, TIMEOUT).join();
            callbackExecuted.complete(null);
        }
    });
    // Do the actual puts.
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key1", 1)), TIMEOUT)).join();
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key1", 2)), TIMEOUT)).join();
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key1", 3)), TIMEOUT)).join();
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key2", 1)), TIMEOUT)).join();
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key2", 2)), TIMEOUT)).join();
    Futures.toVoid(tableStore.put(tableSegmentName, Collections.singletonList(createTableEntry.apply("key2", 3)), TIMEOUT)).join();
    // 4. Above, the test does 7 puts, each one 28 bytes in size (6 entries directly, 1 via callback). Now, we need
    // to wait for the TableCompactor writing the entry (key1, 3) to the tail of the Segment.
    callbackExecuted.join();
    AssertExtensions.assertEventuallyEquals(true, () -> directTableSegment.getInfo().getLength() > (long) serializedEntryLength * writtenEntries, 5000);
    // 5. The TableCompactor has moved the entry, so we immediately stop the container to prevent StorageWriter from
    // making more progress.
    container.close();
    // 6. Create a new container instance that will recover from existing data.
    @Cleanup val container2 = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    container2.startAsync().awaitRunning();
    // 7. Verify that (key1, 4) is the actual value after performing the tail-caching process, which now takes care
    // of entry versions.
    val expected = createTableEntry.apply("key1", 4);
    val tableStore2 = container2.getExtension(ContainerTableExtension.class);
    val actual = tableStore2.get(tableSegmentName, Collections.singletonList(expected.getKey().getKey()), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(0);
    Assert.assertEquals(actual.getKey().getKey(), expected.getKey().getKey());
    Assert.assertEquals(actual.getValue(), expected.getValue());
}
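The property this test protects is that the tail-caching pass compares entry versions rather than Segment offsets when deciding whether to overwrite a cached value. Below is a minimal, hypothetical illustration of that rule; it is not the ContainerKeyIndex implementation, and java.util.Map is assumed to be imported.

// Hypothetical illustration of version-aware tail-caching: an entry read later in the Segment
// (e.g. the compacted copy of (key1, 3)) must not overwrite a cached entry with a higher version
// (the newer (key1, 4)), even though its offset is larger.
static void cacheTailEntry(Map<String, Long> cachedVersionByKey, String key, long candidateVersion) {
    // Map.merge keeps whichever version Math.max selects, so a stale compacted entry is ignored.
    cachedVersionByKey.merge(key, candidateVersion, Math::max);
}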
