Search in sources :

Example 6 with CachedStreamSegmentAppendOperation

Usage of io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation in the project pravega (github.com/pravega/pravega).

From the class SegmentAggregatorTests, method testFlushEmptyAppend.

/**
 * Tests the behavior of flush() when it encounters empty (zero-length) appends.
 */
@Test
public void testFlushEmptyAppend() throws Exception {
    final WriterConfig config = DEFAULT_CONFIG;
    val random = new Random(0);
    final byte[] fullAppendData = new byte[config.getMaxFlushSizeBytes()];
    final byte[] sourceData = new byte[100];
    final int totalLength = fullAppendData.length + sourceData.length;
    random.nextBytes(fullAppendData);
    random.nextBytes(sourceData);
    @Cleanup
    TestContext context = new TestContext(config);

    // Create the target segment in Storage and prime the aggregator.
    context.storage.create(SEGMENT_NAME, TIMEOUT).join();
    context.segmentAggregator.initialize(TIMEOUT).join();
    val targetMetadata = (UpdateableSegmentMetadata) context.segmentAggregator.getMetadata();
    targetMetadata.setLength(totalLength);

    // The first append exactly fills up the AggregatedAppend buffer (max flush size).
    val fullAppend = new StreamSegmentAppendOperation(SEGMENT_ID, new ByteArraySegment(fullAppendData), null);
    fullAppend.setStreamSegmentOffset(0);
    fullAppend.setSequenceNumber(context.containerMetadata.nextOperationSequenceNumber());
    context.dataSource.recordAppend(fullAppend);
    context.segmentAggregator.add(new CachedStreamSegmentAppendOperation(fullAppend));

    // The second append carries no data at all.
    val zeroLengthAppend = new StreamSegmentAppendOperation(SEGMENT_ID, BufferView.empty(), null);
    zeroLengthAppend.setStreamSegmentOffset(fullAppendData.length);
    zeroLengthAppend.setSequenceNumber(context.containerMetadata.nextOperationSequenceNumber());
    context.dataSource.recordAppend(zeroLengthAppend);
    context.segmentAggregator.add(new CachedStreamSegmentAppendOperation(zeroLengthAppend));

    // Set up a source segment in Storage: created, pre-populated with data and sealed.
    val sourceAggregator = context.transactionAggregators[0];
    val sourceMetadata = (UpdateableSegmentMetadata) sourceAggregator.getMetadata();
    sourceMetadata.setLength(sourceData.length);
    sourceMetadata.setStorageLength(sourceData.length);
    context.storage.create(sourceMetadata.getName(), TIMEOUT).join();
    context.storage.openWrite(sourceMetadata.getName())
            .thenCompose(handle -> context.storage
                    .write(handle, 0, new ByteArrayInputStream(sourceData), sourceData.length, TIMEOUT)
                    .thenCompose(v -> context.storage.seal(handle, TIMEOUT)))
            .join();

    // Queue a merge of the source segment immediately after the empty append.
    sourceMetadata.markSealed();
    sourceMetadata.markSealedInStorage();
    sourceMetadata.markMerged();
    val mergeOp = new MergeSegmentOperation(SEGMENT_ID, sourceMetadata.getId());
    mergeOp.setStreamSegmentOffset(fullAppendData.length);
    mergeOp.setLength(sourceMetadata.getLength());
    mergeOp.setSequenceNumber(context.containerMetadata.nextOperationSequenceNumber());
    context.segmentAggregator.add(mergeOp);

    // Flush everything and verify both the (non-empty) append and the merge made it to Storage.
    val flushResult = context.segmentAggregator.flush(TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    Assert.assertEquals("Unexpected number of bytes flushed", fullAppendData.length, flushResult.getFlushedBytes());
    Assert.assertEquals("Unexpected number of bytes merged", sourceData.length, flushResult.getMergedBytes());
    byte[] expectedData = new byte[fullAppendData.length + sourceData.length];
    System.arraycopy(fullAppendData, 0, expectedData, 0, fullAppendData.length);
    System.arraycopy(sourceData, 0, expectedData, fullAppendData.length, sourceData.length);
    verifySegmentData(expectedData, context);
}
Also used : lombok.val(lombok.val) Arrays(java.util.Arrays) StreamSegmentNotExistsException(io.pravega.segmentstore.contracts.StreamSegmentNotExistsException) AssertExtensions(io.pravega.test.common.AssertExtensions) MergeSegmentOperation(io.pravega.segmentstore.server.logs.operations.MergeSegmentOperation) RequiredArgsConstructor(lombok.RequiredArgsConstructor) Cleanup(lombok.Cleanup) Random(java.util.Random) UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) SegmentProperties(io.pravega.segmentstore.contracts.SegmentProperties) AttributeUpdate(io.pravega.segmentstore.contracts.AttributeUpdate) SegmentHandle(io.pravega.segmentstore.storage.SegmentHandle) ByteArrayInputStream(java.io.ByteArrayInputStream) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) BufferView(io.pravega.common.util.BufferView) Duration(java.time.Duration) Map(java.util.Map) Operation(io.pravega.segmentstore.server.logs.operations.Operation) WriterFlushResult(io.pravega.segmentstore.server.WriterFlushResult) StreamSegmentTruncateOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentTruncateOperation) Attributes(io.pravega.segmentstore.contracts.Attributes) InMemoryStorage(io.pravega.segmentstore.storage.mocks.InMemoryStorage) Collectors(java.util.stream.Collectors) ErrorInjector(io.pravega.test.common.ErrorInjector) ByteBufferOutputStream(io.pravega.common.io.ByteBufferOutputStream) Stream(java.util.stream.Stream) ByteArraySegment(io.pravega.common.util.ByteArraySegment) ThreadPooledTestSuite(io.pravega.test.common.ThreadPooledTestSuite) BadOffsetException(io.pravega.segmentstore.contracts.BadOffsetException) Futures(io.pravega.common.concurrent.Futures) TestStorage(io.pravega.segmentstore.server.TestStorage) MetadataBuilder(io.pravega.segmentstore.server.MetadataBuilder) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Exceptions(io.pravega.common.Exceptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) 
HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) UpdateableContainerMetadata(io.pravega.segmentstore.server.UpdateableContainerMetadata) SegmentMetadata(io.pravega.segmentstore.server.SegmentMetadata) Timeout(org.junit.rules.Timeout) OutputStream(java.io.OutputStream) ManualTimer(io.pravega.segmentstore.server.ManualTimer) UpdateAttributesOperation(io.pravega.segmentstore.server.logs.operations.UpdateAttributesOperation) AttributeId(io.pravega.segmentstore.contracts.AttributeId) IntentionalException(io.pravega.test.common.IntentionalException) lombok.val(lombok.val) IOException(java.io.IOException) Test(org.junit.Test) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) AbstractMap(java.util.AbstractMap) AttributeUpdateCollection(io.pravega.segmentstore.contracts.AttributeUpdateCollection) Rule(org.junit.Rule) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StorageOperation(io.pravega.segmentstore.server.logs.operations.StorageOperation) TreeMap(java.util.TreeMap) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) Preconditions(com.google.common.base.Preconditions) AttributeUpdateType(io.pravega.segmentstore.contracts.AttributeUpdateType) RandomFactory(io.pravega.common.hash.RandomFactory) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) Assert(org.junit.Assert) Collections(java.util.Collections) DeleteSegmentOperation(io.pravega.segmentstore.server.logs.operations.DeleteSegmentOperation) StreamSegmentSealOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentSealOperation) InputStream(java.io.InputStream) 
UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) ByteArraySegment(io.pravega.common.util.ByteArraySegment) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) Cleanup(lombok.Cleanup) MergeSegmentOperation(io.pravega.segmentstore.server.logs.operations.MergeSegmentOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) Random(java.util.Random) ByteArrayInputStream(java.io.ByteArrayInputStream) Test(org.junit.Test)

Example 7 with CachedStreamSegmentAppendOperation

Usage of io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation in the project pravega (github.com/pravega/pravega).

From the class SegmentAggregatorTests, method generateAppendAndUpdateMetadata.

/**
 * Creates an append operation for the given segment at its current end offset, extends the
 * segment's metadata length to account for it, records the append with the data source, and
 * returns the operation in its cached form.
 */
private StorageOperation generateAppendAndUpdateMetadata(long segmentId, byte[] data, TestContext context) {
    UpdateableSegmentMetadata metadata = context.containerMetadata.getStreamSegmentMetadata(segmentId);
    long appendOffset = metadata.getLength();
    metadata.setLength(appendOffset + data.length);
    StreamSegmentAppendOperation appendOp = new StreamSegmentAppendOperation(segmentId, new ByteArraySegment(data), null);
    appendOp.setStreamSegmentOffset(appendOffset);
    appendOp.setSequenceNumber(context.containerMetadata.nextOperationSequenceNumber());
    context.dataSource.recordAppend(appendOp);
    return new CachedStreamSegmentAppendOperation(appendOp);
}
Also used : UpdateableSegmentMetadata(io.pravega.segmentstore.server.UpdateableSegmentMetadata) ByteArraySegment(io.pravega.common.util.ByteArraySegment) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation)

Example 8 with CachedStreamSegmentAppendOperation

Usage of io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation in the project pravega (github.com/pravega/pravega).

From the class WriterTableProcessorTests, method generateSimulatedAppend.

/**
 * Creates a CachedStreamSegmentAppendOperation of the given length at the given offset,
 * backed by zero-filled data, targeting the test context's segment.
 */
private CachedStreamSegmentAppendOperation generateSimulatedAppend(long offset, int length, TestContext context) {
    val rawAppend = new StreamSegmentAppendOperation(context.metadata.getId(), offset, new ByteArraySegment(new byte[length]), null);
    rawAppend.setSequenceNumber(context.nextSequenceNumber());
    return new CachedStreamSegmentAppendOperation(rawAppend);
}
Also used : lombok.val(lombok.val) ByteArraySegment(io.pravega.common.util.ByteArraySegment) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation)

Example 9 with CachedStreamSegmentAppendOperation

Usage of io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation in the project pravega (github.com/pravega/pravega).

From the class WriterTableProcessorTests, method testReconcileTableIndexOffset.

/**
 * Tests the ability to reconcile the {@link TableAttributes#INDEX_OFFSET} value when that changes behind the scenes.
 * Exercises four scenarios: INDEX_OFFSET moving (1) before the first queued append, (2) into the middle of an
 * append, (3) onto an append boundary (partial reconciliation), and (4) past the last append (full reconciliation).
 * The first two must fail with DataCorruptionException without modifying the index; the last two must succeed.
 */
@Test
public void testReconcileTableIndexOffset() throws Exception {
    @Cleanup val context = new TestContext();
    // Generate two TableEntries, write them to the segment and queue them into the processor.
    val e1 = TableEntry.unversioned(new ByteArraySegment("k1".getBytes()), new ByteArraySegment("v1".getBytes()));
    val e2 = TableEntry.unversioned(new ByteArraySegment("k2".getBytes()), new ByteArraySegment("v2".getBytes()));
    val append1 = generateRawAppend(e1, INITIAL_LAST_INDEXED_OFFSET, context);
    // append2 starts exactly where append1 ends, so the two are contiguous in the segment.
    val append2 = generateRawAppend(e2, append1.getLastStreamSegmentOffset(), context);
    context.segmentMock.append(append1.getData(), null, TIMEOUT).join();
    context.segmentMock.append(append2.getData(), null, TIMEOUT).join();
    context.processor.add(new CachedStreamSegmentAppendOperation(append1));
    context.processor.add(new CachedStreamSegmentAppendOperation(append2));
    // 1. INDEX_OFFSET changes to smaller than first append
    context.metadata.updateAttributes(Collections.singletonMap(TableAttributes.INDEX_OFFSET, INITIAL_LAST_INDEXED_OFFSET - 1));
    // Snapshot the attribute count so we can verify the failed flush left the index untouched.
    int attributeCountBefore = context.segmentMock.getAttributeCount();
    AssertExtensions.assertSuppliedFutureThrows("flush() worked when INDEX_OFFSET decreased.", () -> context.processor.flush(TIMEOUT), ex -> ex instanceof DataCorruptionException);
    int attributeCountAfter = context.segmentMock.getAttributeCount();
    Assert.assertEquals("flush() seems to have modified the index after failed attempt", attributeCountBefore, attributeCountAfter);
    Assert.assertEquals("flush() seems to have modified the index after failed attempt.", INITIAL_LAST_INDEXED_OFFSET - 1, IndexReader.getLastIndexedOffset(context.metadata));
    // 2. INDEX_OFFSET changes to middle of append.
    context.metadata.updateAttributes(Collections.singletonMap(TableAttributes.INDEX_OFFSET, INITIAL_LAST_INDEXED_OFFSET + 1));
    attributeCountBefore = context.segmentMock.getAttributeCount();
    AssertExtensions.assertSuppliedFutureThrows("flush() worked when INDEX_OFFSET changed to middle of append.", () -> context.processor.flush(TIMEOUT), ex -> ex instanceof DataCorruptionException);
    attributeCountAfter = context.segmentMock.getAttributeCount();
    Assert.assertEquals("flush() seems to have modified the index after failed attempt", attributeCountBefore, attributeCountAfter);
    Assert.assertEquals("flush() seems to have modified the index after failed attempt.", INITIAL_LAST_INDEXED_OFFSET + 1, IndexReader.getLastIndexedOffset(context.metadata));
    // 3. INDEX_OFFSET changes after the first append, but before the second one.
    // This lands on the append1/append2 boundary, so the processor should skip append1 and index only append2.
    context.metadata.updateAttributes(Collections.singletonMap(TableAttributes.INDEX_OFFSET, append2.getStreamSegmentOffset()));
    context.connector.refreshLastIndexedOffset();
    attributeCountBefore = context.segmentMock.getAttributeCount();
    context.processor.flush(TIMEOUT).join();
    attributeCountAfter = context.segmentMock.getAttributeCount();
    // Attribute count must grow: append2 was actually indexed during partial reconciliation.
    AssertExtensions.assertGreaterThan("flush() did not modify the index partial reconciliation.", attributeCountBefore, attributeCountAfter);
    Assert.assertEquals("flush() did not modify the index partial reconciliation.", append2.getLastStreamSegmentOffset(), IndexReader.getLastIndexedOffset(context.metadata));
    Assert.assertFalse("Unexpected result from mustFlush() after partial reconciliation.", context.processor.mustFlush());
    // 4. INDEX_OFFSET changes beyond the last append.
    val e3 = TableEntry.unversioned(new ByteArraySegment("k3".getBytes()), new ByteArraySegment("v3".getBytes()));
    val append3 = generateRawAppend(e3, append2.getLastStreamSegmentOffset(), context);
    context.segmentMock.append(append3.getData(), null, TIMEOUT).join();
    context.processor.add(new CachedStreamSegmentAppendOperation(append3));
    // INDEX_OFFSET is now past the end of append3, so nothing should be (re-)indexed.
    context.metadata.updateAttributes(Collections.singletonMap(TableAttributes.INDEX_OFFSET, append3.getLastStreamSegmentOffset() + 1));
    context.connector.refreshLastIndexedOffset();
    attributeCountBefore = context.segmentMock.getAttributeCount();
    context.processor.flush(TIMEOUT).join();
    attributeCountAfter = context.segmentMock.getAttributeCount();
    Assert.assertEquals("flush() seems to have modified the index after full reconciliation.", attributeCountBefore, attributeCountAfter);
    Assert.assertEquals("flush() did not properly update INDEX_OFFSET after full reconciliation.", append3.getLastStreamSegmentOffset() + 1, IndexReader.getLastIndexedOffset(context.metadata));
    Assert.assertFalse("Unexpected result from mustFlush() after full reconciliation.", context.processor.mustFlush());
}
Also used : lombok.val(lombok.val) ByteArraySegment(io.pravega.common.util.ByteArraySegment) DataCorruptionException(io.pravega.segmentstore.server.DataCorruptionException) Cleanup(lombok.Cleanup) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) Test(org.junit.Test)

Example 10 with CachedStreamSegmentAppendOperation

Usage of io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation in the project pravega (github.com/pravega/pravega).

From the class WriterTableProcessorTests, method generateAndPopulateEntriesBatch.

/**
 * Generates a batch of random Table Entry updates and removals, writes each one to the mock
 * segment, and returns a TestBatchData describing the queued operations and the expected
 * end state of the table.
 */
private TestBatchData generateAndPopulateEntriesBatch(int batchSize, Map<BufferView, TableEntry> initialState, TestContext context) {
    val batch = new TestBatchData(new HashMap<>(initialState));
    // Keys are kept in a list so removal candidates can be picked efficiently by index.
    val candidateKeys = new ArrayList<>(initialState.keySet());
    for (int i = 0; i < batchSize; i++) {
        // A removal is only possible when there is at least one key to remove.
        boolean isRemoval = candidateKeys.size() > 0 && (context.random.nextDouble() < REMOVE_FRACTION);
        StreamSegmentAppendOperation appendOp;
        if (isRemoval) {
            val keyToRemove = candidateKeys.get(context.random.nextInt(candidateKeys.size()));
            appendOp = generateRawRemove(TableKey.unversioned(keyToRemove), context.metadata.getLength(), context);
            batch.expectedEntries.remove(keyToRemove);
            candidateKeys.remove(keyToRemove);
        } else {
            // Create a brand-new Table Entry with random key (at least 1 byte) and random value.
            byte[] keyBytes = new byte[Math.max(1, context.random.nextInt(MAX_KEY_LENGTH))];
            context.random.nextBytes(keyBytes);
            byte[] valueBytes = new byte[context.random.nextInt(MAX_VALUE_LENGTH)];
            context.random.nextBytes(valueBytes);
            // Run the key through the external translator so we cannot clash with internal keys by
            // chance (ContainerTableExtensionImpl normally does this; we simulate the same behavior).
            val newKey = new ByteArraySegment(keyBytes);
            val appendOffset = context.metadata.getLength();
            val newEntry = TableEntry.versioned(newKey, new ByteArraySegment(valueBytes), appendOffset);
            appendOp = generateRawAppend(newEntry, appendOffset, context);
            batch.expectedEntries.put(newKey, newEntry);
            candidateKeys.add(newKey);
        }
        // Reflect the append in the segment metadata, then write it to the mock segment.
        context.metadata.setLength(context.metadata.getLength() + appendOp.getLength());
        context.segmentMock.append(appendOp.getData(), null, TIMEOUT).join();
        // Record the operation the processor will eventually consume.
        batch.operations.add(new CachedStreamSegmentAppendOperation(appendOp));
    }
    return batch;
}
Also used : lombok.val(lombok.val) ByteArraySegment(io.pravega.common.util.ByteArraySegment) ArrayList(java.util.ArrayList) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation) StreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation) CachedStreamSegmentAppendOperation(io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation)

Aggregations

CachedStreamSegmentAppendOperation (io.pravega.segmentstore.server.logs.operations.CachedStreamSegmentAppendOperation)18 StreamSegmentAppendOperation (io.pravega.segmentstore.server.logs.operations.StreamSegmentAppendOperation)14 ByteArraySegment (io.pravega.common.util.ByteArraySegment)13 lombok.val (lombok.val)11 DataCorruptionException (io.pravega.segmentstore.server.DataCorruptionException)9 Test (org.junit.Test)7 StreamSegmentNotExistsException (io.pravega.segmentstore.contracts.StreamSegmentNotExistsException)6 UpdateableSegmentMetadata (io.pravega.segmentstore.server.UpdateableSegmentMetadata)6 Cleanup (lombok.Cleanup)6 SegmentMetadata (io.pravega.segmentstore.server.SegmentMetadata)5 MergeSegmentOperation (io.pravega.segmentstore.server.logs.operations.MergeSegmentOperation)5 Operation (io.pravega.segmentstore.server.logs.operations.Operation)5 StreamSegmentSealOperation (io.pravega.segmentstore.server.logs.operations.StreamSegmentSealOperation)5 Exceptions (io.pravega.common.Exceptions)4 AttributeUpdateCollection (io.pravega.segmentstore.contracts.AttributeUpdateCollection)4 StorageOperation (io.pravega.segmentstore.server.logs.operations.StorageOperation)4 ArrayList (java.util.ArrayList)4 Futures (io.pravega.common.concurrent.Futures)3 AttributeId (io.pravega.segmentstore.contracts.AttributeId)3 AttributeUpdate (io.pravega.segmentstore.contracts.AttributeUpdate)3