Example 76 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

From the class StreamSegmentContainerTests, method testFutureReads.

/**
 * Tests the ability to perform future (tail) reads. Scenarios tested include:
 * * Regular appends
 * * Segment sealing
 * * Transaction merging.
 */
@Test
public void testFutureReads() throws Exception {
    final int nonSealReadLimit = 100;
    @Cleanup TestContext context = createContext();
    context.container.startAsync().awaitRunning();
    // 1. Create the StreamSegments.
    ArrayList<String> segmentNames = createSegments(context);
    HashMap<String, ArrayList<String>> transactionsBySegment = createTransactions(segmentNames, context);
    activateAllSegments(segmentNames, context);
    transactionsBySegment.values().forEach(s -> activateAllSegments(s, context));
    HashMap<String, ReadResult> readsBySegment = new HashMap<>();
    ArrayList<AsyncReadResultProcessor> readProcessors = new ArrayList<>();
    HashSet<String> segmentsToSeal = new HashSet<>();
    HashMap<String, ByteArrayOutputStream> readContents = new HashMap<>();
    HashMap<String, TestReadResultHandler> entryHandlers = new HashMap<>();
    // 2. Set up tail reads. The first half of the segments will be sealed later on, which ends their tail reads;
    // the rest read at most nonSealReadLimit bytes and should stop upon reaching the limit.
    for (int i = 0; i < segmentNames.size(); i++) {
        String segmentName = segmentNames.get(i);
        ByteArrayOutputStream readContentsStream = new ByteArrayOutputStream();
        readContents.put(segmentName, readContentsStream);
        ReadResult readResult;
        if (i < segmentNames.size() / 2) {
            // We're going to seal this one at one point.
            segmentsToSeal.add(segmentName);
            readResult = context.container.read(segmentName, 0, Integer.MAX_VALUE, TIMEOUT).join();
        } else {
            // Just a regular one, nothing special.
            readResult = context.container.read(segmentName, 0, nonSealReadLimit, TIMEOUT).join();
        }
        // The Read callback is only accumulating data in this test; we will then compare it against the real data.
        TestReadResultHandler entryHandler = new TestReadResultHandler(readContentsStream, TIMEOUT);
        entryHandlers.put(segmentName, entryHandler);
        readsBySegment.put(segmentName, readResult);
        readProcessors.add(AsyncReadResultProcessor.process(readResult, entryHandler, executorService()));
    }
    // 3. Add some appends.
    HashMap<String, Long> lengths = new HashMap<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    appendToParentsAndTransactions(segmentNames, transactionsBySegment, lengths, segmentContents, context);
    // 4. Merge all the Transactions.
    Futures.allOf(mergeTransactions(transactionsBySegment, lengths, segmentContents, context, false)).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // 5. Add more appends (to the parent segments)
    ArrayList<CompletableFuture<Void>> operationFutures = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        for (String segmentName : segmentNames) {
            RefCountByteArraySegment appendData = getAppendData(segmentName, APPENDS_PER_SEGMENT + i);
            operationFutures.add(Futures.toVoid(context.container.append(segmentName, appendData, null, TIMEOUT)));
            lengths.put(segmentName, lengths.getOrDefault(segmentName, 0L) + appendData.getLength());
            recordAppend(segmentName, appendData, segmentContents, null);
        }
    }
    segmentsToSeal.forEach(segmentName -> operationFutures.add(Futures.toVoid(context.container.sealStreamSegment(segmentName, TIMEOUT))));
    Futures.allOf(operationFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // Now wait for all the reads to complete, and verify their results against the expected output.
    Futures.allOf(entryHandlers.values().stream().map(h -> h.getCompleted()).collect(Collectors.toList())).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    readProcessors.forEach(AsyncReadResultProcessor::close);
    // Check to see if any errors got thrown (and caught) during the reading process.
    for (Map.Entry<String, TestReadResultHandler> e : entryHandlers.entrySet()) {
        Throwable err = e.getValue().getError().get();
        if (err != null) {
            // The next check (see below) will verify whether the segments were properly read.
            if (!(err instanceof StreamSegmentSealedException && segmentsToSeal.contains(e.getKey()))) {
                Assert.fail("Unexpected error happened while processing Segment " + e.getKey() + ": " + e.getValue().getError().get());
            }
        }
    }
    // Check that all the ReadResults are closed
    for (Map.Entry<String, ReadResult> e : readsBySegment.entrySet()) {
        Assert.assertTrue("Read result is not closed for segment " + e.getKey(), e.getValue().isClosed());
    }
    // Compare, byte-by-byte, the outcome of the tail reads.
    Assert.assertEquals("Unexpected number of segments were read.", segmentContents.size(), readContents.size());
    for (String segmentName : segmentNames) {
        boolean isSealed = segmentsToSeal.contains(segmentName);
        byte[] expectedData = segmentContents.get(segmentName).toByteArray();
        byte[] actualData = readContents.get(segmentName).toByteArray();
        int expectedLength = isSealed ? (int) (long) lengths.get(segmentName) : nonSealReadLimit;
        Assert.assertEquals("Unexpected read length for segment " + segmentName, expectedLength, actualData.length);
        AssertExtensions.assertArrayEquals("Unexpected read contents for segment " + segmentName, expectedData, 0, actualData, 0, actualData.length);
    }
    // 6. Writer moving data to Storage.
    waitForSegmentsInStorage(segmentNames, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    checkStorage(segmentContents, lengths, context);
}
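None of the snippets on this page show the Timeout rule declaration itself, only the test bodies that run under it. The following is a minimal sketch of how a JUnit 4 test class typically wires up org.junit.rules.Timeout; the class name, field name, and 30-second budget are illustrative assumptions, not Pravega's actual constants.

// Minimal sketch of declaring the rule (assumed names and timeout value).
import java.util.concurrent.TimeUnit;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class TimeoutRuleSketch {

    // Applies to every @Test method in this class; a test fails if it runs longer than 30 seconds.
    @Rule
    public Timeout globalTimeout = new Timeout(30, TimeUnit.SECONDS);

    @Test
    public void completesWithinTimeout() {
        // Any test body; it is interrupted and failed once the rule's timeout elapses.
    }
}

Timeout.seconds(30) and Timeout.millis(...) are equivalent factory shortcuts for the constructor used above.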

Example 77 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

From the class DurableLogTests, method testRecoveryWithIncrementalCheckpoints.

/**
 * Tests the DurableLog recovery process when there are multiple {@link MetadataCheckpointOperation}s added, with each
 * such checkpoint including information about evicted segments or segments which had their storage state modified.
 */
@Test
public void testRecoveryWithIncrementalCheckpoints() throws Exception {
    final int streamSegmentCount = 50;
    // Setup a DurableLog and start it.
    @Cleanup TestDurableDataLogFactory dataLogFactory = new TestDurableDataLogFactory(new InMemoryDurableDataLogFactory(MAX_DATA_LOG_APPEND_SIZE, executorService()));
    @Cleanup Storage storage = InMemoryStorageFactory.newStorage(executorService());
    storage.initialize(1);
    // First DurableLog. We use this for generating data.
    val metadata1 = new MetadataBuilder(CONTAINER_ID).build();
    @Cleanup CacheStorage cacheStorage = new DirectMemoryCache(Integer.MAX_VALUE);
    @Cleanup CacheManager cacheManager = new CacheManager(CachePolicy.INFINITE, cacheStorage, executorService());
    List<Long> deletedIds;
    Set<Long> evictIds;
    try (ReadIndex readIndex = new ContainerReadIndex(DEFAULT_READ_INDEX_CONFIG, metadata1, storage, cacheManager, executorService());
        DurableLog durableLog = new DurableLog(ContainerSetup.defaultDurableLogConfig(), metadata1, dataLogFactory, readIndex, executorService())) {
        durableLog.startAsync().awaitRunning();
        // Create some segments.
        val segmentIds = new ArrayList<>(createStreamSegmentsWithOperations(streamSegmentCount, durableLog));
        deletedIds = segmentIds.subList(0, 5);
        val mergedFromIds = segmentIds.subList(5, 10);
        // Must be the same length as mergedFromIds.
        val mergedToIds = segmentIds.subList(10, 15);
        evictIds = new HashSet<>(segmentIds.subList(15, 20));
        val changeStorageStateIds = segmentIds.subList(20, segmentIds.size() - 5);
        // Append something to each segment.
        for (val segmentId : segmentIds) {
            if (!evictIds.contains(segmentId)) {
                durableLog.add(new StreamSegmentAppendOperation(segmentId, generateAppendData((int) (long) segmentId), null), OperationPriority.Normal, TIMEOUT).join();
            }
        }
        // Checkpoint 1.
        durableLog.checkpoint(TIMEOUT).join();
        // Delete some segments.
        for (val segmentId : deletedIds) {
            durableLog.add(new DeleteSegmentOperation(segmentId), OperationPriority.Normal, TIMEOUT).join();
        }
        // Checkpoint 2.
        durableLog.checkpoint(TIMEOUT).join();
        // Merge some segments.
        for (int i = 0; i < mergedFromIds.size(); i++) {
            durableLog.add(new StreamSegmentSealOperation(mergedFromIds.get(i)), OperationPriority.Normal, TIMEOUT).join();
            durableLog.add(new MergeSegmentOperation(mergedToIds.get(i), mergedFromIds.get(i)), OperationPriority.Normal, TIMEOUT).join();
        }
        // Checkpoint 3.
        durableLog.checkpoint(TIMEOUT).join();
        // Evict some segments.
        val evictableContainerMetadata = (EvictableMetadata) metadata1;
        metadata1.removeTruncationMarkers(metadata1.getOperationSequenceNumber());
        val toEvict = evictableContainerMetadata.getEvictionCandidates(Integer.MAX_VALUE, segmentIds.size()).stream().filter(m -> evictIds.contains(m.getId())).collect(Collectors.toList());
        val evicted = evictableContainerMetadata.cleanup(toEvict, Integer.MAX_VALUE);
        AssertExtensions.assertContainsSameElements("", evictIds, evicted.stream().map(SegmentMetadata::getId).collect(Collectors.toList()));
        // Checkpoint 4.
        durableLog.checkpoint(TIMEOUT).join();
        // Update storage state for some segments.
        for (val segmentId : changeStorageStateIds) {
            val sm = metadata1.getStreamSegmentMetadata(segmentId);
            if (segmentId % 3 == 0) {
                sm.setStorageLength(sm.getLength());
            }
            if (segmentId % 4 == 0) {
                sm.markSealed();
                sm.markSealedInStorage();
            }
            if (segmentId % 5 == 0) {
                sm.markDeleted();
                sm.markDeletedInStorage();
            }
        }
        // Checkpoint 5.
        durableLog.checkpoint(TIMEOUT).join();
        // Stop the processor.
        durableLog.stopAsync().awaitTerminated();
    }
    // Second DurableLog. We use this for recovery.
    val metadata2 = new MetadataBuilder(CONTAINER_ID).build();
    try (ContainerReadIndex readIndex = new ContainerReadIndex(DEFAULT_READ_INDEX_CONFIG, metadata2, storage, cacheManager, executorService());
        DurableLog durableLog = new DurableLog(ContainerSetup.defaultDurableLogConfig(), metadata2, dataLogFactory, readIndex, executorService())) {
        durableLog.startAsync().awaitRunning();
        // Validate metadata matches.
        val expectedSegmentIds = metadata1.getAllStreamSegmentIds();
        val actualSegmentIds = metadata2.getAllStreamSegmentIds();
        AssertExtensions.assertContainsSameElements("Unexpected set of recovered segments. Only Active segments expected to have been recovered.", expectedSegmentIds, actualSegmentIds);
        val expectedSegments = expectedSegmentIds.stream().sorted().map(metadata1::getStreamSegmentMetadata).collect(Collectors.toList());
        val actualSegments = actualSegmentIds.stream().sorted().map(metadata2::getStreamSegmentMetadata).collect(Collectors.toList());
        for (int i = 0; i < expectedSegments.size(); i++) {
            val e = expectedSegments.get(i);
            val a = actualSegments.get(i);
            SegmentMetadataComparer.assertEquals("Recovered segment metadata mismatch", e, a);
        }
        // Validate read index is as it should. Here, we can only check if the read indices for evicted segments are
        // no longer loaded; we do more thorough checks in the ContainerReadIndexTests suite.
        Streams.concat(evictIds.stream(), deletedIds.stream()).forEach(segmentId -> Assert.assertNull("Not expecting a read index for an evicted or deleted segment.", readIndex.getIndex(segmentId)));
        // Stop the processor.
        durableLog.stopAsync().awaitTerminated();
    }
}
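Recovery tests like this one are prone to hanging rather than failing outright. Since JUnit 4.12 the Timeout rule can be constructed through a builder that also reports the thread it considers stuck when the timeout fires; a brief sketch follows, with a 60-second budget chosen purely for illustration.

// Sketch of the builder-style Timeout (JUnit 4.12+); the value is an assumption.
import java.util.concurrent.TimeUnit;

import org.junit.Rule;
import org.junit.rules.Timeout;

public class StuckThreadTimeoutSketch {

    // On timeout, JUnit additionally dumps the stack of the thread it believes is stuck,
    // which helps diagnose hangs in multi-threaded recovery code paths.
    @Rule
    public Timeout timeout = Timeout.builder()
            .withTimeout(60, TimeUnit.SECONDS)
            .withLookingForStuckThread(true)
            .build();
}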

Example 78 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

From the class OperationProcessorTests, method testWithInvalidOperations.

/**
 * Tests the ability of the OperationProcessor to process Operations when encountering invalid operations (such as
 * appends to StreamSegments that do not exist or to those that are sealed). This covers the following exceptions:
 * * StreamSegmentNotExistsException
 * * StreamSegmentSealedException
 * * General MetadataUpdateException.
 */
@Test
public void testWithInvalidOperations() throws Exception {
    int streamSegmentCount = 10;
    int appendsPerStreamSegment = 40;
    // We are going to prematurely seal this StreamSegment.
    long sealedStreamSegmentId = 6;
    // We are going to prematurely mark this StreamSegment as deleted.
    long deletedStreamSegmentId = 8;
    // This is a bogus StreamSegment, that does not exist.
    long nonExistentStreamSegmentId;
    @Cleanup TestContext context = new TestContext();
    // Generate some test data (no need to complicate ourselves with Transactions here; that is tested in the no-failure test).
    HashSet<Long> streamSegmentIds = createStreamSegmentsInMetadata(streamSegmentCount, context.metadata);
    nonExistentStreamSegmentId = streamSegmentIds.size();
    streamSegmentIds.add(nonExistentStreamSegmentId);
    context.metadata.getStreamSegmentMetadata(sealedStreamSegmentId).markSealed();
    context.metadata.getStreamSegmentMetadata(deletedStreamSegmentId).markDeleted();
    List<Operation> operations = generateOperations(streamSegmentIds, new HashMap<>(), appendsPerStreamSegment, METADATA_CHECKPOINT_EVERY, false, false);
    // Setup an OperationProcessor and start it.
    @Cleanup TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, MAX_DATA_LOG_APPEND_SIZE, executorService());
    dataLog.initialize(TIMEOUT);
    @Cleanup OperationProcessor operationProcessor = new OperationProcessor(context.metadata, context.stateUpdater, dataLog, getNoOpCheckpointPolicy(), getDefaultThrottlerSettings(), executorService());
    operationProcessor.startAsync().awaitRunning();
    // Process all generated operations.
    List<OperationWithCompletion> completionFutures = processOperations(operations, operationProcessor);
    // Wait for all such operations to complete. We are expecting exceptions, so verify that we do.
    AssertExtensions.assertThrows("No operations failed.", OperationWithCompletion.allOf(completionFutures)::join, ex -> ex instanceof MetadataUpdateException || ex instanceof StreamSegmentException);
    HashSet<Long> streamSegmentsWithNoContents = new HashSet<>();
    streamSegmentsWithNoContents.add(sealedStreamSegmentId);
    streamSegmentsWithNoContents.add(deletedStreamSegmentId);
    streamSegmentsWithNoContents.add(nonExistentStreamSegmentId);
    // Verify that the "right" operations failed, while the others succeeded.
    for (OperationWithCompletion oc : completionFutures) {
        if (oc.operation instanceof StorageOperation) {
            long streamSegmentId = ((StorageOperation) oc.operation).getStreamSegmentId();
            if (streamSegmentsWithNoContents.contains(streamSegmentId)) {
                Assert.assertTrue("Completion future for invalid StreamSegment " + streamSegmentId + " did not complete exceptionally.", oc.completion.isCompletedExceptionally());
                Predicate<Throwable> errorValidator;
                if (streamSegmentId == sealedStreamSegmentId) {
                    errorValidator = ex -> ex instanceof StreamSegmentSealedException;
                } else if (streamSegmentId == deletedStreamSegmentId) {
                    errorValidator = ex -> ex instanceof StreamSegmentNotExistsException;
                } else {
                    errorValidator = ex -> ex instanceof MetadataUpdateException;
                }
                AssertExtensions.assertThrows("Unexpected exception for failed Operation.", oc.completion::join, errorValidator);
                continue;
            }
        }
        // If we get here, we must verify no exception was thrown.
        oc.completion.join();
    }
    performLogOperationChecks(completionFutures, context.memoryLog, dataLog, context.metadata);
    performMetadataChecks(streamSegmentIds, streamSegmentsWithNoContents, new HashMap<>(), completionFutures, context.metadata, false, false);
    performReadIndexChecks(completionFutures, context.readIndex);
    operationProcessor.stopAsync().awaitTerminated();
}
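Note that these tests also bound every asynchronous wait explicitly, for example future.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS), rather than relying on the class-level rule alone, so a single stalled future fails fast with a TimeoutException. Below is a self-contained sketch of that pattern; the TIMEOUT value is an assumption, not Pravega's constant.

// Sketch of bounding an async wait with an explicit Duration (TIMEOUT value assumed).
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class BoundedWaitSketch {

    private static final Duration TIMEOUT = Duration.ofSeconds(30);

    static <T> T waitBounded(CompletableFuture<T> future) throws Exception {
        try {
            // Fails fast with TimeoutException instead of blocking until the class-wide rule trips.
            return future.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        } catch (TimeoutException ex) {
            throw new AssertionError("Operation did not complete within " + TIMEOUT, ex);
        }
    }
}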

Example 79 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

From the class DataFrameReaderTests, method testReadsWithPartialEntries.

/**
 * Tests the case when we begin reading from a DataFrame which begins with a partial record. That record needs to
 * be dropped (not returned). DataFrameReader should always return full records.
 */
@Test
public void testReadsWithPartialEntries() throws Exception {
    // This test will only work if LARGE_RECORD_MIN_SIZE > FRAME_SIZE.
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(3, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MIN_SIZE, 0);
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        ArrayList<DataFrameBuilder.CommitArgs> commitFrames = new ArrayList<>();
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, commitFrames::add, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (TestLogItem r : records) {
                b.append(r);
            }
            b.flush();
        }
        // Delete the first entry in the DataLog.
        ArrayList<Integer> failedIndices = new ArrayList<>();
        dataLog.truncate(commitFrames.get(0).getLogAddress(), TIMEOUT).join();
        // Given that each TestLogItem's length is larger than a data frame, truncating the first DataFrame will
        // invalidate the first record.
        failedIndices.add(0);
        TestSerializer logItemFactory = new TestSerializer();
        DataFrameReader<TestLogItem> reader = new DataFrameReader<>(dataLog, logItemFactory, CONTAINER_ID);
        List<TestLogItem> readItems = readAll(reader);
        checkReadResult(records, failedIndices, readItems);
    }
}
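For a single bounded test such as this one, JUnit also offers a per-method timeout directly on the @Test annotation as an alternative to a class-wide rule; here is a short sketch (the 20-second value is illustrative, not taken from these tests).

// Sketch: per-method timeout as an alternative to a class-wide Timeout rule.
import org.junit.Test;

public class PerMethodTimeoutSketch {

    // JUnit runs the body on a separate thread and fails the test if it exceeds 20 seconds.
    @Test(timeout = 20_000)
    public void boundedRead() {
        // test body
    }
}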

Example 80 with Timeout

use of org.junit.rules.Timeout in project pravega by pravega.

From the class DataFrameReaderTests, method testReadsNoFailure.

/**
 * Tests the happy case: DataFrameReader can read from a DataLog when there are no exceptions.
 */
@Test
public void testReadsNoFailure() throws Exception {
    // Fail every X records (write-wise).
    int failEvery = 7;
    ArrayList<TestLogItem> records = DataFrameTestHelpers.generateLogItems(100, SMALL_RECORD_MIN_SIZE, SMALL_RECORD_MAX_SIZE, 0);
    records.addAll(DataFrameTestHelpers.generateLogItems(100, LARGE_RECORD_MIN_SIZE, LARGE_RECORD_MAX_SIZE, records.size()));
    // Have every other 'failEvery' record fail after writing 90% of itself.
    for (int i = 0; i < records.size(); i += failEvery) {
        records.get(i).failSerializationAfterComplete(0.9, new IOException("intentional " + i));
    }
    HashSet<Integer> failedIndices = new HashSet<>();
    try (TestDurableDataLog dataLog = TestDurableDataLog.create(CONTAINER_ID, FRAME_SIZE, executorService())) {
        dataLog.initialize(TIMEOUT);
        BiConsumer<Throwable, DataFrameBuilder.CommitArgs> errorCallback = (ex, a) -> Assert.fail(String.format("Unexpected error occurred upon commit. %s", ex));
        val args = new DataFrameBuilder.Args(Callbacks::doNothing, Callbacks::doNothing, errorCallback, executorService());
        try (DataFrameBuilder<TestLogItem> b = new DataFrameBuilder<>(dataLog, SERIALIZER, args)) {
            for (int i = 0; i < records.size(); i++) {
                try {
                    b.append(records.get(i));
                } catch (IOException ex) {
                    failedIndices.add(i);
                }
            }
            b.flush();
        }
        TestSerializer logItemFactory = new TestSerializer();
        DataFrameReader<TestLogItem> reader = new DataFrameReader<>(dataLog, logItemFactory, CONTAINER_ID);
        List<TestLogItem> readItems = readAll(reader);
        checkReadResult(records, failedIndices, readItems);
    }
}
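A fixed timeout gets in the way when stepping through tests like these in a debugger. JUnit ships org.junit.rules.DisableOnDebug, which suppresses a wrapped rule while the JVM runs with debug agents; the sketch below assumes a 30-second budget and is not taken from the Pravega sources.

// Sketch: disable the timeout while a debugger is attached (timeout value assumed).
import java.util.concurrent.TimeUnit;

import org.junit.Rule;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;

public class DebuggableTimeoutSketch {

    // The Timeout applies only when the JVM was not started with debug arguments (e.g. -agentlib:jdwp).
    @Rule
    public TestRule timeout = new DisableOnDebug(new Timeout(30, TimeUnit.SECONDS));
}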

Aggregations

Timeout (org.junit.rules.Timeout): 92
Rule (org.junit.Rule): 91
Test (org.junit.Test): 91
Assert (org.junit.Assert): 84
AssertExtensions (io.pravega.test.common.AssertExtensions): 81
Duration (java.time.Duration): 81
ArrayList (java.util.ArrayList): 80
lombok.val (lombok.val): 79
TimeUnit (java.util.concurrent.TimeUnit): 78
ThreadPooledTestSuite (io.pravega.test.common.ThreadPooledTestSuite): 77
Cleanup (lombok.Cleanup): 73
Collections (java.util.Collections): 72
CompletableFuture (java.util.concurrent.CompletableFuture): 72
HashMap (java.util.HashMap): 69
ByteArraySegment (io.pravega.common.util.ByteArraySegment): 68
Collectors (java.util.stream.Collectors): 68
List (java.util.List): 66
AtomicReference (java.util.concurrent.atomic.AtomicReference): 66
IntentionalException (io.pravega.test.common.IntentionalException): 62
Map (java.util.Map): 62