
Example 1 with TableAttributes

Use of io.pravega.segmentstore.contracts.tables.TableAttributes in project pravega by pravega.

In class WriterTableProcessor, method flushOnce:

/**
 * Performs a single flush attempt.
 *
 * @param segment A {@link DirectSegmentAccess} representing the Segment to flush on.
 * @param timer   Timer for the operation.
 * @return A CompletableFuture that, when completed, will indicate the flush has completed successfully. If the
 * operation failed, it will be failed with the appropriate exception. Notable exceptions:
 * <ul>
 * <li>{@link BadAttributeUpdateException} If a conditional update on the {@link TableAttributes#INDEX_OFFSET} attribute failed.</li>
 * </ul>
 */
private CompletableFuture<TableWriterFlushResult> flushOnce(DirectSegmentAccess segment, TimeoutTimer timer) {
    // Index all the keys in the segment range pointed to by the aggregator.
    long lastOffset = this.aggregator.getLastIndexToProcessAtOnce(this.connector.getMaxFlushSize());
    assert lastOffset - this.aggregator.getFirstOffset() <= this.connector.getMaxFlushSize();
    if (lastOffset < this.aggregator.getLastOffset()) {
        log.info("{}: Partial flush initiated up to offset {}. State: {}.", this.traceObjectId, lastOffset, this.aggregator);
    }
    KeyUpdateCollection keyUpdates = readKeysFromSegment(segment, this.aggregator.getFirstOffset(), lastOffset, timer);
    log.debug("{}: Flush.ReadFromSegment KeyCount={}, UpdateCount={}, HighestCopiedOffset={}, LastIndexedOffset={}.", this.traceObjectId, keyUpdates.getUpdates().size(), keyUpdates.getTotalUpdateCount(), keyUpdates.getHighestCopiedOffset(), keyUpdates.getLastIndexedOffset());
    // Group the keys by their assigned TableBucket, fetch the existing keys
    // for each such bucket and finally (re)index and update the buckets.
    return this.indexWriter.groupByBucket(segment, keyUpdates.getUpdates(), timer).thenComposeAsync(builders -> fetchExistingKeys(builders, segment, timer).thenComposeAsync(v -> {
        val bucketUpdates = builders.stream().map(BucketUpdate.Builder::build).collect(Collectors.toList());
        logBucketUpdates(bucketUpdates);
        return this.indexWriter.updateBuckets(segment, bucketUpdates, this.aggregator.getLastIndexedOffset(), keyUpdates.getLastIndexedOffset(), keyUpdates.getTotalUpdateCount(), timer.getRemaining());
    }, this.executor), this.executor).thenApply(updateCount -> new TableWriterFlushResult(keyUpdates, updateCount));
}
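
The Javadoc above calls out BadAttributeUpdateException on TableAttributes#INDEX_OFFSET: the index offset is only advanced by a conditional attribute update, which succeeds only if the attribute still holds the value the flush started from. Below is a minimal sketch of such a guarded update, assuming Pravega's AttributeUpdate/AttributeUpdateCollection API; the helper name advanceIndexOffset is hypothetical and not part of WriterTableProcessor.

// Hypothetical helper (not in WriterTableProcessor): conditionally advance
// TableAttributes.INDEX_OFFSET. If another writer already moved the offset,
// the update fails with BadAttributeUpdateException and this flush must retry.
private CompletableFuture<Void> advanceIndexOffset(DirectSegmentAccess segment, long expectedOffset, long newOffset, Duration timeout) {
    val update = new AttributeUpdate(TableAttributes.INDEX_OFFSET, AttributeUpdateType.ReplaceIfEquals, newOffset, expectedOffset);
    return segment.updateAttributes(AttributeUpdateCollection.from(update), timeout);
}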
Also used : TableAttributes(io.pravega.segmentstore.contracts.tables.TableAttributes) SneakyThrows(lombok.SneakyThrows) Exceptions(io.pravega.common.Exceptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CompletableFuture(java.util.concurrent.CompletableFuture) BadAttributeUpdateException(io.pravega.segmentstore.contracts.BadAttributeUpdateException) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) SerializationException(io.pravega.common.io.SerializationException) Operation(io.pravega.segmentstore.server.logs.operations.Operation) WriterFlushResult(io.pravega.segmentstore.server.WriterFlushResult) TimeoutTimer(io.pravega.common.TimeoutTimer) NonNull(lombok.NonNull) Collection(java.util.Collection) lombok.val(lombok.val) ThreadSafe(javax.annotation.concurrent.ThreadSafe) GuardedBy(javax.annotation.concurrent.GuardedBy) Collectors(java.util.stream.Collectors) AtomicLong(java.util.concurrent.atomic.AtomicLong) DirectSegmentAccess(io.pravega.segmentstore.server.DirectSegmentAccess) Slf4j(lombok.extern.slf4j.Slf4j) SegmentOperation(io.pravega.segmentstore.server.SegmentOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) WriterSegmentProcessor(io.pravega.segmentstore.server.WriterSegmentProcessor) ArrayDeque(java.util.ArrayDeque) Futures(io.pravega.common.concurrent.Futures) ReadResult(io.pravega.segmentstore.contracts.ReadResult)

Example 2 with TableAttributes

Use of io.pravega.segmentstore.contracts.tables.TableAttributes in project pravega by pravega.

In class HashTableSegmentLayoutTests, method testRecovery:

/**
 * Tests the ability to resume operations after a recovery event. Scenarios include:
 * - Index is up-to-date ({@link TableAttributes#INDEX_OFFSET} equals Segment.Length).
 * - Index is not up-to-date ({@link TableAttributes#INDEX_OFFSET} is less than Segment.Length).
 */
@Test
public void testRecovery() throws Exception {
    // Generate a set of TestEntryData (List<TableEntry>, ExpectedResults).
    // Process each TestEntryData in turn.  After each time, re-create the Extension.
    // Verify gets are blocked on indexing. Then index, verify unblocked and then re-create the Extension, and verify again.
    val recoveryConfig = TableExtensionConfig.builder().with(TableExtensionConfig.MAX_TAIL_CACHE_PREINDEX_BATCH_SIZE, (MAX_KEY_LENGTH + MAX_VALUE_LENGTH) * 11).build();
    @Cleanup val context = new TableContext(recoveryConfig, executorService());
    // Create the Segment.
    context.ext.createSegment(SEGMENT_NAME, SegmentType.TABLE_SEGMENT_HASH, TIMEOUT).join();
    // Close the initial extension, as we don't need it anymore.
    context.ext.close();
    // Generate test data (in update & remove batches).
    val data = generateTestData(context);
    // Process each such batch in turn.
    for (int i = 0; i < data.size(); i++) {
        val current = data.get(i);
        // Create a new extension and apply the current batch of updates and removals; a side effect
        // of this is writing the data to the Segment.
        try (val ext = context.createExtension()) {
            val toUpdate = current.toUpdate.entrySet().stream().map(e -> toUnconditionalTableEntry(e.getKey(), e.getValue(), 0)).collect(Collectors.toList());
            ext.put(SEGMENT_NAME, toUpdate, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            val toRemove = current.toRemove.stream().map(k -> toUnconditionalKey(k, 0)).collect(Collectors.toList());
            ext.remove(SEGMENT_NAME, toRemove, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        }
        // Create a new instance of the extension (which simulates a recovery) and verify it exhibits the correct behavior.
        try (val ext = context.createExtension()) {
            // We should have unindexed data.
            long lastIndexedOffset = context.segment().getInfo().getAttributes().get(TableAttributes.INDEX_OFFSET);
            long segmentLength = context.segment().getInfo().getLength();
            AssertExtensions.assertGreaterThan("Expected some unindexed data.", lastIndexedOffset, segmentLength);
            // This ensures that the last iteration uses the processor.
            boolean useProcessor = i % 2 == 0;
            // Verify get requests are blocked.
            val key1 = current.expectedEntries.keySet().stream().findFirst().orElse(null);
            val get1 = ext.get(SEGMENT_NAME, Collections.singletonList(key1), TIMEOUT);
            val getResult1 = get1.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            Assert.assertEquals("Unexpected completion result for recovered get.", current.expectedEntries.get(key1), getResult1.get(0).getValue());
            if (useProcessor) {
                // Create, populate, and flush the processor.
                @Cleanup val processor = (WriterTableProcessor) ext.createWriterSegmentProcessors(context.segment().getMetadata()).stream().findFirst().orElse(null);
                addToProcessor(lastIndexedOffset, (int) (segmentLength - lastIndexedOffset), processor);
                processor.flush(TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                Assert.assertFalse("Unexpected result from WriterTableProcessor.mustFlush() after flushing.", processor.mustFlush());
            }
        }
    }
    // Verify the final result. We create yet another extension here and purposefully do not instantiate any writer processors;
    // we want to make sure the data is accessible even without them being created (since the indexing is all caught up).
    @Cleanup val ext2 = context.createExtension();
    check(data.get(data.size() - 1).expectedEntries, Collections.emptyList(), ext2);
}
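
The recovery check above hinges on comparing the TableAttributes.INDEX_OFFSET attribute against the Segment's length: any bytes past the attribute value are unindexed. A small sketch of that comparison, assuming a SegmentProperties-style info object like the one returned by context.segment().getInfo(); the helper name isIndexCaughtUp is hypothetical.

// Hypothetical helper: the index is caught up once INDEX_OFFSET has reached
// the Segment's length; anything between the two offsets is unindexed data.
private boolean isIndexCaughtUp(SegmentProperties info) {
    long indexOffset = info.getAttributes().getOrDefault(TableAttributes.INDEX_OFFSET, 0L);
    return indexOffset >= info.getLength();
}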
Also used : lombok.val(lombok.val) TableAttributes(io.pravega.segmentstore.contracts.tables.TableAttributes) SneakyThrows(lombok.SneakyThrows) AssertExtensions(io.pravega.test.common.AssertExtensions) TimeoutException(java.util.concurrent.TimeoutException) Cleanup(lombok.Cleanup) HashMap(java.util.HashMap) IteratorArgs(io.pravega.segmentstore.contracts.tables.IteratorArgs) ArrayList(java.util.ArrayList) SegmentType(io.pravega.segmentstore.contracts.SegmentType) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) Map(java.util.Map) Timeout(org.junit.rules.Timeout) AttributeId(io.pravega.segmentstore.contracts.AttributeId) Test(org.junit.Test) Collectors(java.util.stream.Collectors) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) ByteArraySegment(io.pravega.common.util.ByteArraySegment) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) Assert(org.junit.Assert) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) Collections(java.util.Collections)

Example 3 with TableAttributes

Use of io.pravega.segmentstore.contracts.tables.TableAttributes in project pravega by pravega.

In class TableCompactor, method compact:

/**
 * Performs a compaction of the Table Segment. Refer to this class' Javadoc for a description of the compaction process.
 *
 * @param timer Timer for the operation.
 * @return A CompletableFuture that, when completed, indicates the compaction has completed. When this future completes,
 * some of the Segment's Table Attributes may change to reflect the modifications to the Segment and/or compaction progress.
 * Notable exceptions:
 * <ul>
 * <li>{@link BadAttributeUpdateException} If the {@link TableAttributes#COMPACTION_OFFSET} changed while this method
 * was executing. In this case, no change will be performed and it can be resolved with a retry.</li>
 * </ul>
 */
CompletableFuture<Void> compact(TimeoutTimer timer) {
    long startOffset = getCompactionStartOffset();
    int maxLength = (int) Math.min(this.config.getMaxCompactionSize(), getLastIndexedOffset() - startOffset);
    if (startOffset < 0 || maxLength < 0) {
        // The Segment's Compaction offset must be a value between 0 and the current LastIndexedOffset.
        return Futures.failedFuture(new DataCorruptionException(String.format("%s: '%s' has CompactionStartOffset=%s and CompactionLength=%s.", this.traceLogId, this.metadata.getName(), startOffset, maxLength)));
    } else if (maxLength == 0) {
        // Nothing to do.
        log.debug("{}: Up to date.", this.traceLogId);
        return CompletableFuture.completedFuture(null);
    }
    // Read the Table Entries beginning at the specified offset, without exceeding the given maximum length.
    return getRetryPolicy().runAsync(() -> readCandidates(startOffset, maxLength, timer).thenComposeAsync(candidates -> excludeObsolete(candidates, timer).thenComposeAsync(v -> copyCandidates(candidates, timer), this.executor), this.executor), this.executor);
}
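
The Javadoc notes that a BadAttributeUpdateException on TableAttributes#COMPACTION_OFFSET leaves the Segment unchanged and can be resolved with a retry. Below is a sketch of how a caller might wrap compact() to retry once on exactly that failure; the wrapper name compactWithRetry is hypothetical, and Exceptions.unwrap comes from io.pravega.common.Exceptions (seen in Example 1's imports).

// Hypothetical wrapper: retry compact() once if the conditional update on
// TableAttributes.COMPACTION_OFFSET was lost to a concurrent modification.
CompletableFuture<Void> compactWithRetry(TimeoutTimer timer) {
    return compact(timer).handle((r, ex) -> {
        if (ex == null) {
            return CompletableFuture.<Void>completedFuture(null);
        }
        if (Exceptions.unwrap(ex) instanceof BadAttributeUpdateException) {
            // COMPACTION_OFFSET moved while we ran; no changes were made, so try again.
            return compact(timer);
        }
        return Futures.<Void>failedFuture(ex);
    }).thenCompose(f -> f);
}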
Also used : TableAttributes(io.pravega.segmentstore.contracts.tables.TableAttributes) Getter(lombok.Getter) SneakyThrows(lombok.SneakyThrows) Retry(io.pravega.common.util.Retry) RequiredArgsConstructor(lombok.RequiredArgsConstructor) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) BadAttributeUpdateException(io.pravega.segmentstore.contracts.BadAttributeUpdateException) ArrayList(java.util.ArrayList) AttributeUpdate(io.pravega.segmentstore.contracts.AttributeUpdate) SegmentMetadata(io.pravega.segmentstore.server.SegmentMetadata) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) BufferView(io.pravega.common.util.BufferView) Map(java.util.Map) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) SerializationException(io.pravega.common.io.SerializationException) MathHelpers(io.pravega.common.MathHelpers) AsyncReadResultProcessor(io.pravega.segmentstore.server.reading.AsyncReadResultProcessor) TableKey(io.pravega.segmentstore.contracts.tables.TableKey) TimeoutTimer(io.pravega.common.TimeoutTimer) NonNull(lombok.NonNull) Collection(java.util.Collection) lombok.val(lombok.val) DirectSegmentAccess(io.pravega.segmentstore.server.DirectSegmentAccess) Slf4j(lombok.extern.slf4j.Slf4j) AttributeUpdateCollection(io.pravega.segmentstore.contracts.AttributeUpdateCollection) Data(lombok.Data) AttributeUpdateType(io.pravega.segmentstore.contracts.AttributeUpdateType) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) Comparator(java.util.Comparator) TableEntry(io.pravega.segmentstore.contracts.tables.TableEntry) Futures(io.pravega.common.concurrent.Futures) ReadResult(io.pravega.segmentstore.contracts.ReadResult)

Aggregations

BufferView (io.pravega.common.util.BufferView): 3 usages
TableAttributes (io.pravega.segmentstore.contracts.tables.TableAttributes): 3 usages
DataCorruptionException (io.pravega.segmentstore.server.DataCorruptionException): 3 usages
SneakyThrows (lombok.SneakyThrows): 3 usages
lombok.val (lombok.val): 3 usages
TimeoutTimer (io.pravega.common.TimeoutTimer): 2 usages
Futures (io.pravega.common.concurrent.Futures): 2 usages
SerializationException (io.pravega.common.io.SerializationException): 2 usages
BadAttributeUpdateException (io.pravega.segmentstore.contracts.BadAttributeUpdateException): 2 usages
ReadResult (io.pravega.segmentstore.contracts.ReadResult): 2 usages
TableEntry (io.pravega.segmentstore.contracts.tables.TableEntry): 2 usages
DirectSegmentAccess (io.pravega.segmentstore.server.DirectSegmentAccess): 2 usages
CachedStreamSegmentAppendOperation (io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation): 2 usages
Duration (java.time.Duration): 2 usages
ArrayList (java.util.ArrayList): 2 usages
Collection (java.util.Collection): 2 usages
HashMap (java.util.HashMap): 2 usages
Map (java.util.Map): 2 usages
CompletableFuture (java.util.concurrent.CompletableFuture): 2 usages
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 2 usages