Search in sources :

Example 36 with TimeoutTimer

use of io.pravega.common.TimeoutTimer in project pravega by pravega.

the class SegmentAggregator method beginReconciliation.

// endregion
// region Reconciliation
/**
 * Initiates the Storage reconciliation procedure. Gets the current state of the Segment from Storage, and based on that,
 * does one of the following:
 * * Nothing, if the Storage agrees with the Metadata.
 * * Throws a show-stopping DataCorruptionException (wrapped in a CompletionException) if the situation is unrecoverable.
 * * Initiates the Reconciliation Procedure, which allows the reconcile() method to execute.
 *
 * @param timer    Timer for the operation.
 * @return A CompletableFuture that indicates when the operation completed.
 */
private CompletableFuture<Void> beginReconciliation(TimeoutTimer timer) {
    assert this.state.get() == AggregatorState.ReconciliationNeeded : "beginReconciliation cannot be called if state == " + this.state;
    return this.storage.getStreamSegmentInfo(this.metadata.getName(), timer.getRemaining()).thenAcceptAsync(sp -> {
        if (sp.getLength() > this.metadata.getLength()) {
            // The Segment's length in Storage is greater than the Metadata Length, most likely due to some other
            // actor altering the Segment. We cannot recover automatically from this situation.
            throw new CompletionException(new ReconciliationFailureException("Actual Segment length in Storage is larger than the Metadata Length.", this.metadata, sp));
        } else if (sp.getLength() < this.metadata.getStorageLength()) {
            // The Segment is shorter in Storage than the Metadata StorageLength indicates it should be.
            // We cannot recover automatically from this situation.
            throw new CompletionException(new ReconciliationFailureException("Actual Segment length in Storage is smaller than the Metadata StorageLength.", this.metadata, sp));
        } else if (sp.getLength() == this.metadata.getStorageLength() && sp.isSealed() == this.metadata.isSealedInStorage()) {
            // Nothing to do. Exit reconciliation and re-enter normal writing mode.
            setState(AggregatorState.Writing);
            return;
        }
        // If we get here, it means we have work to do. Set the state accordingly and move on.
        this.reconciliationState.set(new ReconciliationState(this.metadata, sp));
        setState(AggregatorState.Reconciling);
    }, this.executor).exceptionally(ex -> {
        ex = Exceptions.unwrap(ex);
        if (ex instanceof StreamSegmentNotExistsException) {
            // Segment does not exist in Storage. There are three possible situations:
            if (this.metadata.isMerged() || this.metadata.isDeleted()) {
                // Segment has actually been deleted. This is either due to un-acknowledged Merge/Delete operations
                // or because a concurrent instance of the same container (with a lower epoch) is still
                // running and was in the middle of executing the Merge/Delete operation while we were initializing.
                updateMetadataPostDeletion(this.metadata);
                log.info("{}: Segment '{}' does not exist in Storage (reconciliation). Ignoring all further operations on it.", this.traceObjectId, this.metadata.getName());
                this.reconciliationState.set(null);
                setState(AggregatorState.Reconciling);
            } else if (this.metadata.getStorageLength() > 0) {
                // The Metadata indicates data should already exist in Storage (StorageLength > 0), but the Segment
                // is missing from Storage. We cannot recover automatically from this situation.
                throw new CompletionException(new ReconciliationFailureException("Segment does not exist in Storage, but Metadata StorageLength is non-zero.", this.metadata, StreamSegmentInformation.builder().name(this.metadata.getName()).deleted(true).build()));
            } else {
                // Segment does not exist in Storage, but the Metadata indicates it should be empty. This is
                // a valid situation since we may not have had a chance to create it yet.
                this.reconciliationState.set(new ReconciliationState(this.metadata, StreamSegmentInformation.builder().name(this.metadata.getName()).build()));
                setState(AggregatorState.Reconciling);
            }
        } else {
            // Other kind of error - re-throw.
            throw new CompletionException(ex);
        }
        return null;
    });
}
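beginReconciliation above passes timer.getRemaining() into the Storage call so that the reconciliation step only consumes whatever is left of the overall operation budget. A minimal sketch of that pattern, assuming only the pravega-common artifact on the classpath; the getSegmentLength stand-in below is hypothetical, not Pravega's API:

import io.pravega.common.TimeoutTimer;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;

public class SharedDeadlineSketch {
    // Hypothetical async Storage lookup, used only to illustrate the timer hand-off.
    private static CompletableFuture<Long> getSegmentLength(String segmentName, Duration timeout) {
        return CompletableFuture.completedFuture(0L);
    }

    static CompletableFuture<Void> checkSegment(String segmentName, Duration overallTimeout) {
        // One timer for the whole operation; getRemaining() shrinks as wall-clock time passes,
        // so each chained call receives only the budget that is still left.
        TimeoutTimer timer = new TimeoutTimer(overallTimeout);
        return getSegmentLength(segmentName, timer.getRemaining())
                .thenCompose(firstLength -> getSegmentLength(segmentName, timer.getRemaining()))
                .thenAccept(secondLength -> { });
    }
}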
Also used : Storage(io.pravega.segmentstore.storage.Storage) StreamSegmentInformation(io.pravega.segmentstore.contracts.StreamSegmentInformation) StreamSegmentNotExistsException(io.pravega.segmentstore.contracts.StreamSegmentNotExistsException) SneakyThrows(lombok.SneakyThrows) MergeSegmentOperation(io.pravega.segmentstore.server.logs.operations.MergeSegmentOperation) Cleanup(lombok.Cleanup) ServiceHaltException(io.pravega.segmentstore.server.ServiceHaltException) UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) SegmentProperties(io.pravega.segmentstore.contracts.SegmentProperties) StreamSegmentSealedException(io.pravega.segmentstore.contracts.StreamSegmentSealedException) SegmentHandle(io.pravega.segmentstore.storage.SegmentHandle) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) Operation(io.pravega.segmentstore.server.logs.operations.Operation) WriterFlushResult(io.pravega.segmentstore.server.WriterFlushResult) StreamSegmentTruncateOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentTruncateOperation) Attributes(io.pravega.segmentstore.contracts.Attributes) Predicate(java.util.function.Predicate) CompletionException(java.util.concurrent.CompletionException) ThreadSafe(javax.annotation.concurrent.ThreadSafe) GuardedBy(javax.annotation.concurrent.GuardedBy) Collectors(java.util.stream.Collectors) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) StreamSegmentExistsException(io.pravega.segmentstore.contracts.StreamSegmentExistsException) BadOffsetException(io.pravega.segmentstore.contracts.BadOffsetException) WriterSegmentProcessor(io.pravega.segmentstore.server.WriterSegmentProcessor) Futures(io.pravega.common.concurrent.Futures) Getter(lombok.Getter) SegmentRollingPolicy(io.pravega.segmentstore.storage.SegmentRollingPolicy) Exceptions(io.pravega.common.Exceptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Supplier(java.util.function.Supplier) AbstractTimer(io.pravega.common.AbstractTimer) SegmentMetadata(io.pravega.segmentstore.server.SegmentMetadata) Nullable(javax.annotation.Nullable) LoggerHelpers(io.pravega.common.LoggerHelpers) TimeoutTimer(io.pravega.common.TimeoutTimer) Executor(java.util.concurrent.Executor) AtomicLong(java.util.concurrent.atomic.AtomicLong) SegmentOperation(io.pravega.segmentstore.server.SegmentOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StorageOperation(io.pravega.segmentstore.server.logs.operations.StorageOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) Preconditions(com.google.common.base.Preconditions) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) ArrayDeque(java.util.ArrayDeque) DeleteSegmentOperation(io.pravega.segmentstore.server.logs.operations.DeleteSegmentOperation) StreamSegmentSealOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentSealOperation) InputStream(java.io.InputStream) CompletionException(java.util.concurrent.CompletionException) StreamSegmentNotExistsException(io.pravega.segmentstore.contracts.StreamSegmentNotExistsException)

Example 37 with TimeoutTimer

use of io.pravega.common.TimeoutTimer in project pravega by pravega.

the class WriterTableProcessor method flushOnce.

/**
 * Performs a single flush attempt.
 *
 * @param segment A {@link DirectSegmentAccess} representing the Segment to flush on.
 * @param timer   Timer for the operation.
 * @return A CompletableFuture that, when completed, will indicate the flush has completed successfully. If the
 * operation failed, it will be failed with the appropriate exception. Notable exceptions:
 * <ul>
 * <li>{@link BadAttributeUpdateException} If a conditional update on the {@link TableAttributes#INDEX_OFFSET} attribute failed.
 * </ul>
 */
private CompletableFuture<TableWriterFlushResult> flushOnce(DirectSegmentAccess segment, TimeoutTimer timer) {
    // Index all the keys in the segment range pointed to by the aggregator.
    long lastOffset = this.aggregator.getLastIndexToProcessAtOnce(this.connector.getMaxFlushSize());
    assert lastOffset - this.aggregator.getFirstOffset() <= this.connector.getMaxFlushSize();
    if (lastOffset < this.aggregator.getLastOffset()) {
        log.info("{}: Partial flush initiated up to offset {}. State: {}.", this.traceObjectId, lastOffset, this.aggregator);
    }
    KeyUpdateCollection keyUpdates = readKeysFromSegment(segment, this.aggregator.getFirstOffset(), lastOffset, timer);
    log.debug("{}: Flush.ReadFromSegment KeyCount={}, UpdateCount={}, HighestCopiedOffset={}, LastIndexedOffset={}.", this.traceObjectId, keyUpdates.getUpdates().size(), keyUpdates.getTotalUpdateCount(), keyUpdates.getHighestCopiedOffset(), keyUpdates.getLastIndexedOffset());
    // Group the updates by their Table Bucket, fetch the existing keys for each such bucket and finally
    // update (re-index) each bucket.
    return this.indexWriter.groupByBucket(segment, keyUpdates.getUpdates(), timer).thenComposeAsync(builders -> fetchExistingKeys(builders, segment, timer).thenComposeAsync(v -> {
        val bucketUpdates = builders.stream().map(BucketUpdate.Builder::build).collect(Collectors.toList());
        logBucketUpdates(bucketUpdates);
        return this.indexWriter.updateBuckets(segment, bucketUpdates, this.aggregator.getLastIndexedOffset(), keyUpdates.getLastIndexedOffset(), keyUpdates.getTotalUpdateCount(), timer.getRemaining());
    }, this.executor), this.executor).thenApply(updateCount -> new TableWriterFlushResult(keyUpdates, updateCount));
}
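flushOnce reads the un-indexed key updates, groups them by the Table Bucket they hash into, and then updates each bucket once rather than once per key. A simplified, hypothetical illustration of that grouping step; KeyUpdate and bucketFor below are illustrative stand-ins, not Pravega's types:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class BucketGroupingSketch {
    // Illustrative stand-in for a key update read from the segment.
    record KeyUpdate(String key, long offset) { }

    // Hypothetical mapping of a key to a bucket id via hashing.
    static int bucketFor(KeyUpdate update, int bucketCount) {
        return Math.floorMod(update.key().hashCode(), bucketCount);
    }

    static Map<Integer, List<KeyUpdate>> groupByBucket(List<KeyUpdate> updates, int bucketCount) {
        // Updates destined for the same bucket are collected together, so the index entry
        // for that bucket is read and rewritten once per flush instead of once per key.
        return updates.stream().collect(Collectors.groupingBy(u -> bucketFor(u, bucketCount)));
    }
}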
Also used : TableAttributes(io.pravega.segmentstore.contracts.tables.TableAttributes) SneakyThrows(lombok.SneakyThrows) Exceptions(io.pravega.common.Exceptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CompletableFuture(java.util.concurrent.CompletableFuture) BadAttributeUpdateException(io.pravega.segmentstore.contracts.BadAttributeUpdateException) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) SerializationException(io.pravega.common.io.SerializationException) Operation(io.pravega.segmentstore.server.logs.operations.Operation) WriterFlushResult(io.pravega.segmentstore.server.WriterFlushResult) TimeoutTimer(io.pravega.common.TimeoutTimer) NonNull(lombok.NonNull) Collection(java.util.Collection) lombok.val(lombok.val) ThreadSafe(javax.annotation.concurrent.ThreadSafe) GuardedBy(javax.annotation.concurrent.GuardedBy) Collectors(java.util.stream.Collectors) AtomicLong(java.util.concurrent.atomic.AtomicLong) DirectSegmentAccess(io.pravega.segmentstore.server.DirectSegmentAccess) Slf4j(lombok.extern.slf4j.Slf4j) SegmentOperation(io.pravega.segmentstore.server.SegmentOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) WriterSegmentProcessor(io.pravega.segmentstore.server.WriterSegmentProcessor) ArrayDeque(java.util.ArrayDeque) Futures(io.pravega.common.concurrent.Futures) ReadResult(io.pravega.segmentstore.contracts.ReadResult) lombok.val(lombok.val)

Example 38 with TimeoutTimer

use of io.pravega.common.TimeoutTimer in project pravega by pravega.

the class TableBucketReader method findAllExisting.

// endregion
// region Searching
/**
 * Locates all {@link ResultT} instances in a TableBucket.
 *
 * @param bucketOffset The current segment offset of the Table Bucket we are looking into.
 * @param timer        A {@link TimeoutTimer} for the operation.
 * @return A CompletableFuture that, when completed, will contain a List with the desired result items. This list
 * will exclude all {@link ResultT} items that are marked as deleted.
 */
CompletableFuture<List<ResultT>> findAllExisting(long bucketOffset, TimeoutTimer timer) {
    val result = new HashMap<BufferView, ResultT>();
    // This handler ensures that items are only added once (per key) and only if they are not deleted. Since the items
    // are processed in descending version order, the first time we encounter a key, we have its latest value.
    BiConsumer<ResultT, Long> handler = (item, offset) -> {
        TableKey key = getKey(item);
        if (!result.containsKey(key.getKey())) {
            result.put(key.getKey(), key.getVersion() == TableKey.NOT_EXISTS ? null : item);
        }
    };
    return findAll(bucketOffset, handler, timer).thenApply(v -> result.values().stream().filter(Objects::nonNull).collect(Collectors.toList()));
}
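The handler relies on the descending version order guaranteed by findAll: the first value seen for a key is its newest, and a NOT_EXISTS marker is stored as null so that older versions of a deleted key are filtered out at the end. A self-contained sketch of that pattern with plain strings; the Entry type and the null deletion marker are illustrative, not Pravega's API:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

public class LatestValueSketch {
    // Illustrative entry type; value == null stands in for a deletion marker.
    record Entry(String key, String value) { }

    // Entries are assumed to arrive sorted newest-first (descending version order).
    static List<String> latestExisting(List<Entry> newestFirst) {
        Map<String, String> latest = new HashMap<>();
        for (Entry e : newestFirst) {
            if (!latest.containsKey(e.key())) {
                // The first (newest) occurrence of a key wins; a deletion records null so that
                // older versions of the same key cannot resurface in the result.
                latest.put(e.key(), e.value());
            }
        }
        return latest.values().stream().filter(Objects::nonNull).collect(Collectors.toList());
    }
}

For an input of [("a","v2"), ("a","v1"), ("b",null)] in newest-first order, only "v2" is returned: the older ("a","v1") is shadowed and the deleted "b" is dropped.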
Also used : lombok.val(lombok.val) TableKey(io.pravega.segmentstore.contracts.tables.TableKey) TimeoutTimer(io.pravega.common.TimeoutTimer) Executor(java.util.concurrent.Executor) NonNull(lombok.NonNull) RequiredArgsConstructor(lombok.RequiredArgsConstructor) lombok.val(lombok.val) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) AtomicLong(java.util.concurrent.atomic.AtomicLong) DirectSegmentAccess(io.pravega.segmentstore.server.DirectSegmentAccess) List(java.util.List) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) BiConsumer(java.util.function.BiConsumer) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) Futures(io.pravega.common.concurrent.Futures) ReadResult(io.pravega.segmentstore.contracts.ReadResult) AsyncReadResultProcessor(io.pravega.segmentstore.server.reading.AsyncReadResultProcessor) HashMap(java.util.HashMap) AtomicLong(java.util.concurrent.atomic.AtomicLong) Objects(java.util.Objects) TableKey(io.pravega.segmentstore.contracts.tables.TableKey)

Example 39 with TimeoutTimer

use of io.pravega.common.TimeoutTimer in project pravega by pravega.

the class StreamSegmentContainer method append.

@Override
public CompletableFuture<Long> append(String streamSegmentName, long offset, BufferView data, AttributeUpdateCollection attributeUpdates, Duration timeout) {
    ensureRunning();
    TimeoutTimer timer = new TimeoutTimer(timeout);
    logRequest("appendWithOffset", streamSegmentName, data.getLength());
    this.metrics.appendWithOffset();
    return this.metadataStore.getOrAssignSegmentId(streamSegmentName, timer.getRemaining(), streamSegmentId -> {
        val operation = new StreamSegmentAppendOperation(streamSegmentId, offset, data, attributeUpdates);
        return processAppend(operation, timer).thenApply(v -> operation.getLastStreamSegmentOffset());
    });
}
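append converts the caller's single Duration into a TimeoutTimer, so the segment-id lookup and the append itself draw from the same budget rather than each getting the full timeout. A small runnable sketch showing why getRemaining() is passed instead of the original timeout, assuming the pravega-common artifact on the classpath:

import io.pravega.common.TimeoutTimer;
import java.time.Duration;

public class RemainingTimeDemo {
    public static void main(String[] args) throws InterruptedException {
        TimeoutTimer timer = new TimeoutTimer(Duration.ofSeconds(5));
        System.out.println("Remaining at start: " + timer.getRemaining());
        Thread.sleep(1000);
        // getRemaining() shrinks as wall-clock time passes, so a step started late
        // in the operation is given correspondingly less time to complete.
        System.out.println("Remaining after ~1s: " + timer.getRemaining());
    }
}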
Also used : lombok.val(lombok.val) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) TimeoutTimer(io.pravega.common.TimeoutTimer)

Example 40 with TimeoutTimer

use of io.pravega.common.TimeoutTimer in project pravega by pravega.

the class StreamSegmentContainer method sealStreamSegment.

@Override
public CompletableFuture<Long> sealStreamSegment(String streamSegmentName, Duration timeout) {
    ensureRunning();
    logRequest("seal", streamSegmentName);
    this.metrics.seal();
    TimeoutTimer timer = new TimeoutTimer(timeout);
    return this.metadataStore.getOrAssignSegmentId(streamSegmentName, timer.getRemaining(), streamSegmentId -> {
        StreamSegmentSealOperation operation = new StreamSegmentSealOperation(streamSegmentId);
        return addOperation(operation, timeout).thenApply(seqNo -> operation.getStreamSegmentOffset());
    });
}
Also used : StreamSegmentSealOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentSealOperation) TimeoutTimer(io.pravega.common.TimeoutTimer)

Aggregations

TimeoutTimer (io.pravega.common.TimeoutTimer)97 lombok.val (lombok.val)55 CompletableFuture (java.util.concurrent.CompletableFuture)52 Duration (java.time.Duration)51 Futures (io.pravega.common.concurrent.Futures)47 Preconditions (com.google.common.base.Preconditions)41 CompletionException (java.util.concurrent.CompletionException)41 Slf4j (lombok.extern.slf4j.Slf4j)41 Collectors (java.util.stream.Collectors)40 SneakyThrows (lombok.SneakyThrows)40 Exceptions (io.pravega.common.Exceptions)39 BufferView (io.pravega.common.util.BufferView)37 Getter (lombok.Getter)37 SegmentProperties (io.pravega.segmentstore.contracts.SegmentProperties)36 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)36 StreamSegmentNotExistsException (io.pravega.segmentstore.contracts.StreamSegmentNotExistsException)34 SegmentMetadata (io.pravega.segmentstore.server.SegmentMetadata)34 RequiredArgsConstructor (lombok.RequiredArgsConstructor)34 ArrayList (java.util.ArrayList)33 List (java.util.List)31