
Example 11 with DurableDataLog

use of io.pravega.segmentstore.storage.DurableDataLog in project pravega by pravega.

the class OperationProcessorTests method testConcurrentStopAndCommit.

/**
 * Tests a scenario where the OperationProcessor is shut down while a DataFrame is being processed. The DataFrame
 * write will eventually complete successfully, but the operation it carries should be cancelled.
 */
@Test
public void testConcurrentStopAndCommit() throws Exception {
    @Cleanup TestContext context = new TestContext();
    // Generate some test data.
    val segmentId = createStreamSegmentsInMetadata(1, context.metadata).stream().findFirst().orElse(-1L);
    List<Operation> operations = Collections.singletonList(new StreamSegmentAppendOperation(segmentId, new byte[1], null));
    CompletableFuture<LogAddress> appendCallback = new CompletableFuture<>();
    // Setup an OperationProcessor with a custom DurableDataLog and start it.
    @Cleanup DurableDataLog dataLog = new ManualAppendOnlyDurableDataLog(() -> appendCallback);
    dataLog.initialize(TIMEOUT);
    @Cleanup OperationProcessor operationProcessor = new OperationProcessor(context.metadata, context.stateUpdater, dataLog, getNoOpCheckpointPolicy(), executorService());
    operationProcessor.startAsync().awaitRunning();
    // Process all generated operations.
    OperationWithCompletion completionFuture = processOperations(operations, operationProcessor).stream().findFirst().orElse(null);
    // Stop the processor, then let the in-flight DataFrame append complete.
    operationProcessor.stopAsync();
    appendCallback.complete(new TestLogAddress(1));
    operationProcessor.awaitTerminated();
    // Wait for the operation to complete. The operation should have been cancelled (due to the OperationProcessor
    // shutting down); no other exception (or successful completion) is accepted.
    AssertExtensions.assertThrows("Operation did not fail with the right exception.",
            () -> completionFuture.completion,
            ex -> ex instanceof CancellationException || ex instanceof ObjectClosedException);
}
Also used : lombok.val(lombok.val) DurableDataLog(io.pravega.segmentstore.storage.DurableDataLog) TestDurableDataLog(io.pravega.segmentstore.server.TestDurableDataLog) ProbeOperation(io.pravega.segmentstore.server.logs.operations.ProbeOperation) Operation(io.pravega.segmentstore.server.logs.operations.Operation) StorageOperation(io.pravega.segmentstore.server.logs.operations.StorageOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) Cleanup(lombok.Cleanup) LogAddress(io.pravega.segmentstore.storage.LogAddress) CompletableFuture(java.util.concurrent.CompletableFuture) CancellationException(java.util.concurrent.CancellationException) ObjectClosedException(io.pravega.common.ObjectClosedException) Test(org.junit.Test)
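The key to this test is that ManualAppendOnlyDurableDataLog (a test helper defined elsewhere in OperationProcessorTests, not shown here) defers completion of its append until the test completes the supplied future. A minimal, self-contained sketch of that hand-off pattern, using only java.util.concurrent types instead of Pravega's DurableDataLog and LogAddress (all names below are illustrative, not Pravega APIs):

```java
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;

public class ManualAppendSketch {
    public static void main(String[] args) {
        // The test owns this future; the fake log returns it from append(),
        // so the test controls exactly when the "DataFrame write" finishes.
        CompletableFuture<Long> appendCallback = new CompletableFuture<>();

        // The operation's completion future, normally completed by the processor
        // once the append it rides on is durably written.
        CompletableFuture<Void> operationCompletion = new CompletableFuture<>();

        // Wire the happy path: when the append finishes, complete the operation.
        appendCallback.thenRun(() -> operationCompletion.complete(null));

        // Simulate the OperationProcessor shutting down first: it cancels every
        // operation it still owns, regardless of the in-flight append.
        operationCompletion.cancel(true);

        // The append itself still "succeeds"...
        appendCallback.complete(1L);

        // ...but the operation remains cancelled, which is what the test asserts.
        try {
            operationCompletion.join();
            System.out.println("Unexpected: operation completed successfully.");
        } catch (CancellationException ex) {
            System.out.println("Operation was cancelled, as expected.");
        }
    }
}
```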

Example 12 with DurableDataLog

use of io.pravega.segmentstore.storage.DurableDataLog in project pravega by pravega.

the class BookKeeperLogTests method testAppendPermanentFailures.

/**
 * Tests that appends fail (or are cancelled) when Bookies stay down long enough for write retries to be exhausted.
 */
@Test
public void testAppendPermanentFailures() throws Exception {
    try (DurableDataLog log = createDurableDataLog()) {
        log.initialize(TIMEOUT);
        List<CompletableFuture<LogAddress>> appendFutures = new ArrayList<>();
        try {
            // Suspend a bookie (this will trigger write errors).
            stopFirstBookie();
            // Issue appends in parallel.
            int writeCount = getWriteCount();
            for (int i = 0; i < writeCount; i++) {
                appendFutures.add(log.append(new ByteArraySegment(getWriteData()), TIMEOUT));
            }
            // Verify that all writes failed or got cancelled.
            AtomicBoolean cancellationEncountered = new AtomicBoolean(false);
            for (val f : appendFutures) {
                AssertExtensions.assertThrows("Write did not fail correctly.", () -> f.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS), ex -> {
                    // Once one write is cancelled (the log has given up and closed), every later queued write must
                    // also surface as cancelled; until then, each write must fail with a genuine write error.
                    cancellationEncountered.set(cancellationEncountered.get() || ex instanceof CancellationException);
                    if (cancellationEncountered.get()) {
                        return ex instanceof CancellationException;
                    } else {
                        return ex instanceof RetriesExhaustedException || ex instanceof DurableDataLogException;
                    }
                });
            }
        } finally {
            // Don't forget to resume the bookie, but only AFTER we are done testing.
            restartFirstBookie();
        }
    }
}
Also used : DurableDataLog(io.pravega.segmentstore.storage.DurableDataLog) lombok.val(lombok.val) DurableDataLogException(io.pravega.segmentstore.storage.DurableDataLogException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CompletableFuture(java.util.concurrent.CompletableFuture) ByteArraySegment(io.pravega.common.util.ByteArraySegment) RetriesExhaustedException(io.pravega.common.util.RetriesExhaustedException) CancellationException(java.util.concurrent.CancellationException) ArrayList(java.util.ArrayList) Test(org.junit.Test)
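The tester lambda encodes a "first cancellation wins" rule: writes attempted before the log gives up must fail with RetriesExhaustedException or DurableDataLogException, and everything queued after that point must surface as a CancellationException. A self-contained sketch of that check against plain CompletableFutures (exception stand-ins and helper names here are illustrative, not Pravega APIs):

```java
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.atomic.AtomicBoolean;

public class PermanentFailureCheckSketch {
    public static void main(String[] args) {
        // Stand-in for the real write-failure exceptions used in the test.
        RuntimeException retriesExhausted = new RuntimeException("retries exhausted");

        // Simulate three queued appends: the first two were attempted and failed;
        // once the log shut down, the remaining one was cancelled outright.
        List<CompletableFuture<Long>> appends = List.of(
                CompletableFuture.failedFuture(retriesExhausted),
                CompletableFuture.failedFuture(retriesExhausted),
                cancelled());

        AtomicBoolean cancellationEncountered = new AtomicBoolean(false);
        for (CompletableFuture<Long> f : appends) {
            Throwable cause = failureOf(f);
            cancellationEncountered.set(cancellationEncountered.get() || cause instanceof CancellationException);
            boolean ok = cancellationEncountered.get()
                    ? cause instanceof CancellationException // after the first cancellation, only cancellations are allowed
                    : cause == retriesExhausted;             // otherwise the write must have failed "for real"
            System.out.println(ok ? "expected failure" : "UNEXPECTED failure: " + cause);
        }
    }

    private static CompletableFuture<Long> cancelled() {
        CompletableFuture<Long> f = new CompletableFuture<>();
        f.cancel(true);
        return f;
    }

    // Extract the underlying failure, the way f.get() would expose it via ExecutionException.
    private static Throwable failureOf(CompletableFuture<?> f) {
        try {
            f.join();
            throw new AssertionError("future did not fail");
        } catch (CancellationException ex) {
            return ex;
        } catch (CompletionException ex) {
            return ex.getCause();
        }
    }
}
```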

Example 13 with DurableDataLog

use of io.pravega.segmentstore.storage.DurableDataLog in project pravega by pravega.

the class BookKeeperLogTests method testAppendTransientBookieFailure.

/**
 * Tests the ability to retry writes when a Bookie fails transiently: appends issued while the Bookie is down
 * should still complete successfully once it is restarted.
 */
@Test
public void testAppendTransientBookieFailure() throws Exception {
    TreeMap<LogAddress, byte[]> writeData = new TreeMap<>(Comparator.comparingLong(LogAddress::getSequence));
    try (DurableDataLog log = createDurableDataLog()) {
        log.initialize(TIMEOUT);
        val dataList = new ArrayList<byte[]>();
        val futures = new ArrayList<CompletableFuture<LogAddress>>();
        try {
            // Suspend a bookie (this will trigger write errors).
            stopFirstBookie();
            // Issue appends in parallel, without waiting for them.
            int writeCount = getWriteCount();
            for (int i = 0; i < writeCount; i++) {
                byte[] data = getWriteData();
                futures.add(log.append(new ByteArraySegment(data), TIMEOUT));
                dataList.add(data);
            }
        } finally {
            // Resume the bookie with the appends still in flight.
            restartFirstBookie();
        }
        // Wait for all writes to complete, then reassemble the data in the order set by LogAddress.
        val addresses = Futures.allOfWithResults(futures).join();
        for (int i = 0; i < dataList.size(); i++) {
            writeData.put(addresses.get(i), dataList.get(i));
        }
    }
    // Verify data.
    try (DurableDataLog log = createDurableDataLog()) {
        log.initialize(TIMEOUT);
        verifyReads(log, writeData);
    }
}
Also used : DurableDataLog(io.pravega.segmentstore.storage.DurableDataLog) lombok.val(lombok.val) LogAddress(io.pravega.segmentstore.storage.LogAddress) ByteArraySegment(io.pravega.common.util.ByteArraySegment) ArrayList(java.util.ArrayList) TreeMap(java.util.TreeMap) Test(org.junit.Test)
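Futures.allOfWithResults (from io.pravega.common.concurrent.Futures) waits for every append and returns the resulting LogAddresses in submission order; the TreeMap keyed by LogAddress.getSequence then reorders the payloads into log order for verifyReads. A rough, self-contained stand-in for that collect-then-reorder step using only the JDK (names and sequence numbers below are illustrative):

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;
import java.util.concurrent.CompletableFuture;

public class CollectAndReorderSketch {
    // Roughly what Futures.allOfWithResults does: wait for all futures and
    // return their results in the order the futures were submitted.
    static <T> CompletableFuture<List<T>> allOfWithResults(List<CompletableFuture<T>> futures) {
        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
                .thenApply(v -> {
                    List<T> results = new ArrayList<>(futures.size());
                    futures.forEach(f -> results.add(f.join())); // non-blocking here: all futures are done
                    return results;
                });
    }

    public static void main(String[] args) {
        // Simulate three appends whose assigned sequence numbers differ from submission order.
        List<byte[]> dataList = List.of(new byte[]{1}, new byte[]{2}, new byte[]{3});
        List<CompletableFuture<Long>> futures = List.of(
                CompletableFuture.completedFuture(30L),
                CompletableFuture.completedFuture(10L),
                CompletableFuture.completedFuture(20L));

        List<Long> addresses = allOfWithResults(futures).join();

        // Key the payloads by sequence so they can be compared against the log's read order.
        TreeMap<Long, byte[]> writeData = new TreeMap<>(Comparator.naturalOrder());
        for (int i = 0; i < dataList.size(); i++) {
            writeData.put(addresses.get(i), dataList.get(i));
        }

        writeData.forEach((seq, data) -> System.out.println(seq + " -> " + data[0]));
    }
}
```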

Aggregations

DurableDataLog (io.pravega.segmentstore.storage.DurableDataLog) 13
Test (org.junit.Test) 8
lombok.val (lombok.val) 6
ArrayList (java.util.ArrayList) 5
CompletableFuture (java.util.concurrent.CompletableFuture) 5
LogAddress (io.pravega.segmentstore.storage.LogAddress) 4
CancellationException (java.util.concurrent.CancellationException) 4
Cleanup (lombok.Cleanup) 4
ObjectClosedException (io.pravega.common.ObjectClosedException) 3
StreamSegmentNotExistsException (io.pravega.segmentstore.contracts.StreamSegmentNotExistsException) 3
Duration (java.time.Duration) 3
HashMap (java.util.HashMap) 3
Runnables (com.google.common.util.concurrent.Runnables) 2
Service (com.google.common.util.concurrent.Service) 2
ArrayView (io.pravega.common.util.ArrayView) 2
ByteArraySegment (io.pravega.common.util.ByteArraySegment) 2
StreamSegmentSealedException (io.pravega.segmentstore.contracts.StreamSegmentSealedException) 2
ConfigHelpers (io.pravega.segmentstore.server.ConfigHelpers) 2
ReadIndex (io.pravega.segmentstore.server.ReadIndex) 2
ServiceListeners (io.pravega.segmentstore.server.ServiceListeners) 2