use of io.pravega.segmentstore.contracts.AttributeUpdateCollection in project pravega by pravega.
the class FixedKeyLengthTableSegmentLayout method put.
@Override
CompletableFuture<List<Long>> put(@NonNull DirectSegmentAccess segment, @NonNull List<TableEntry> entries, long tableSegmentOffset, TimeoutTimer timer) {
    val segmentInfo = segment.getInfo();
    ensureSegmentType(segmentInfo.getName(), segmentInfo.getType());
    val segmentKeyLength = getSegmentKeyLength(segmentInfo);
    ensureValidKeyLength(segmentInfo.getName(), segmentKeyLength);
    val attributeUpdates = new AttributeUpdateCollection();
    int batchOffset = 0;
    val batchOffsets = new ArrayList<Integer>();
    boolean isConditional = false;
    for (val e : entries) {
        val key = e.getKey();
        Preconditions.checkArgument(key.getKey().getLength() == segmentKeyLength,
                "Entry Key Length for key `%s` incompatible with segment '%s' which requires key lengths of %s.",
                key, segmentInfo.getName(), segmentKeyLength);
        attributeUpdates.add(createIndexUpdate(key, batchOffset));
        isConditional |= key.hasVersion();
        batchOffsets.add(batchOffset);
        batchOffset += this.serializer.getUpdateLength(e);
    }
    logRequest("put", segmentInfo.getName(), isConditional, tableSegmentOffset, entries.size(), batchOffset);
    if (batchOffset > this.config.getMaxBatchSize()) {
        throw new UpdateBatchTooLargeException(batchOffset, this.config.getMaxBatchSize());
    }
    // Update total number of entries in Table (this includes updates to the same key).
    attributeUpdates.add(new AttributeUpdate(TableAttributes.TOTAL_ENTRY_COUNT, AttributeUpdateType.Accumulate, entries.size()));
    val serializedEntries = this.serializer.serializeUpdate(entries);
    val append = tableSegmentOffset == TableSegmentLayout.NO_OFFSET
            ? segment.append(serializedEntries, attributeUpdates, timer.getRemaining())
            : segment.append(serializedEntries, attributeUpdates, tableSegmentOffset, timer.getRemaining());
    return handleConditionalUpdateException(append, segmentInfo).thenApply(segmentOffset -> {
        this.compactionService.process(new CompactionCandidate(segment));
        return batchOffsets.stream().map(offset -> offset + segmentOffset).collect(Collectors.toList());
    });
}
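Note on the return value: each entry's absolute position in the segment is recovered by adding its offset inside the serialized batch to the offset at which the whole append landed. A minimal sketch of that arithmetic, using only java.util and illustrative numbers (the entry sizes and append offset below are assumptions, not values from the snippet):
import java.util.List;
import java.util.stream.Collectors;

public class PutOffsetSketch {
    public static void main(String[] args) {
        // Three serialized entries of 10, 25 and 40 bytes batched into one append (illustrative sizes).
        List<Integer> batchOffsets = List.of(0, 10, 35);
        long appendOffset = 1_000;  // offset returned for the whole append (assumed)
        List<Long> entryOffsets = batchOffsets.stream()
                .map(offset -> offset + appendOffset)   // absolute segment offset per entry
                .collect(Collectors.toList());
        System.out.println(entryOffsets);               // [1000, 1010, 1035]
    }
}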
use of io.pravega.segmentstore.contracts.AttributeUpdateCollection in project pravega by pravega.
the class DataRecoveryTest method testRepairLogEditOperationsWithContent.
@Test
public void testRepairLogEditOperationsWithContent() throws IOException {
    // Setup command object.
    STATE.set(new AdminCommandState());
    Properties pravegaProperties = new Properties();
    pravegaProperties.setProperty("pravegaservice.container.count", "1");
    pravegaProperties.setProperty("pravegaservice.clusterName", "pravega0");
    STATE.get().getConfigBuilder().include(pravegaProperties);
    CommandArgs args = new CommandArgs(List.of("0"), STATE.get());
    DurableDataLogRepairCommand command = Mockito.spy(new DurableDataLogRepairCommand(args));
    List<DurableDataLogRepairCommand.LogEditOperation> editOps = new ArrayList<>();
    // Case 1: Input Add Edit Operations for a MetadataCheckpointOperation and a StorageMetadataCheckpointOperation,
    // each with an all-zeros payload as content.
    Mockito.doReturn(true).doReturn(true).doReturn(false).when(command).confirmContinue();
    Mockito.doReturn(1L).when(command).getLongUserInput(Mockito.any());
    Mockito.doReturn(100).when(command).getIntUserInput(Mockito.any());
    Mockito.doReturn("add").doReturn("MetadataCheckpointOperation").doReturn("zero")
            .doReturn("StorageMetadataCheckpointOperation").doReturn("zero")
            .when(command).getStringUserInput(Mockito.any());
    MetadataCheckpointOperation metadataCheckpointOperation = new MetadataCheckpointOperation();
    metadataCheckpointOperation.setContents(new ByteArraySegment(new byte[100]));
    StorageMetadataCheckpointOperation storageMetadataCheckpointOperation = new StorageMetadataCheckpointOperation();
    storageMetadataCheckpointOperation.setContents(new ByteArraySegment(new byte[100]));
    editOps.add(new DurableDataLogRepairCommand.LogEditOperation(DurableDataLogRepairCommand.LogEditType.ADD_OPERATION, 1, 1, metadataCheckpointOperation));
    editOps.add(new DurableDataLogRepairCommand.LogEditOperation(DurableDataLogRepairCommand.LogEditType.ADD_OPERATION, 1, 1, storageMetadataCheckpointOperation));
    Assert.assertEquals(editOps, command.getDurableLogEditsFromUser());
    // Case 2: Input an Add Edit Operation for a StreamSegmentAppendOperation with content loaded from a file.
    editOps.clear();
    byte[] content = new byte[] { 1, 2, 3, 4, 5 };
    File tmpFile = File.createTempFile("operationContent", "bin");
    Files.write(tmpFile.toPath(), content);
    Mockito.doReturn(true).doReturn(false).when(command).confirmContinue();
    Mockito.doReturn(1L).when(command).getLongUserInput(Mockito.any());
    Mockito.doReturn(1).doReturn(10).when(command).getIntUserInput(Mockito.any());
    Mockito.doReturn("wrong").doReturn("add").doReturn("StreamSegmentAppendOperation")
            .doReturn("file").doReturn(tmpFile.toString())
            .when(command).getStringUserInput(Mockito.any());
    StreamSegmentAppendOperation appendOperation = new StreamSegmentAppendOperation(1, 20, new ByteArraySegment(content), new AttributeUpdateCollection());
    editOps.add(new DurableDataLogRepairCommand.LogEditOperation(DurableDataLogRepairCommand.LogEditType.ADD_OPERATION, 1, 1, appendOperation));
    Assert.assertEquals(editOps, command.getDurableLogEditsFromUser());
    Files.delete(tmpFile.toPath());
    // Case 3: Abort content generation.
    Mockito.doReturn("quit").when(command).getStringUserInput(Mockito.any());
    AssertExtensions.assertThrows("", command::createOperationContents, ex -> ex instanceof RuntimeException);
}
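The test drives the interactive prompts of DurableDataLogRepairCommand by stubbing them: chained doReturn calls make Mockito hand back a different value on each successive invocation, and the last stubbed value repeats afterwards. A self-contained sketch of that stubbing pattern (the UserInput interface and the prompt strings below are hypothetical; only Mockito itself is assumed):
import org.mockito.Mockito;

interface UserInput {
    String getStringUserInput(String prompt);
}

public class ConsecutiveStubbingSketch {
    public static void main(String[] args) {
        UserInput input = Mockito.mock(UserInput.class);
        // Each chained doReturn supplies the value for the next call; the last one repeats afterwards.
        Mockito.doReturn("add").doReturn("MetadataCheckpointOperation").doReturn("quit")
                .when(input).getStringUserInput(Mockito.any());
        System.out.println(input.getStringUserInput("operation?")); // add
        System.out.println(input.getStringUserInput("type?"));      // MetadataCheckpointOperation
        System.out.println(input.getStringUserInput("next?"));      // quit
        System.out.println(input.getStringUserInput("next?"));      // quit (last stubbed value repeats)
    }
}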
use of io.pravega.segmentstore.contracts.AttributeUpdateCollection in project pravega by pravega.
the class StreamSegmentContainerTests method mergeTransactions.
private ArrayList<CompletableFuture<Void>> mergeTransactions(HashMap<String, ArrayList<String>> transactionsBySegment, HashMap<String, Long> lengths,
        HashMap<String, ByteArrayOutputStream> segmentContents, TestContext context, boolean conditionalMerge) throws Exception {
    ArrayList<CompletableFuture<Void>> mergeFutures = new ArrayList<>();
    int i = 0;
    for (Map.Entry<String, ArrayList<String>> e : transactionsBySegment.entrySet()) {
        String parentName = e.getKey();
        for (String transactionName : e.getValue()) {
            if (++i % 2 == 0) {
                // Every other Merge operation, pre-seal the source. We want to verify we correctly handle this situation as well.
                mergeFutures.add(Futures.toVoid(context.container.sealStreamSegment(transactionName, TIMEOUT)));
            }
            // Use both calls, with and without attribute updates for mergeSegments.
            if (conditionalMerge) {
                AttributeUpdateCollection attributeUpdates = AttributeUpdateCollection.from(
                        new AttributeUpdate(AttributeId.fromUUID(UUID.nameUUIDFromBytes(transactionName.getBytes())),
                                AttributeUpdateType.ReplaceIfEquals, transactionName.hashCode() + 1, transactionName.hashCode()));
                mergeFutures.add(Futures.toVoid(context.container.mergeStreamSegment(parentName, transactionName, attributeUpdates, TIMEOUT)));
            } else {
                mergeFutures.add(Futures.toVoid(context.container.mergeStreamSegment(parentName, transactionName, TIMEOUT)));
            }
            // Update parent length.
            lengths.put(parentName, lengths.get(parentName) + lengths.get(transactionName));
            lengths.remove(transactionName);
            // Update parent contents.
            segmentContents.get(parentName).write(segmentContents.get(transactionName).toByteArray());
            segmentContents.remove(transactionName);
        }
    }
    return mergeFutures;
}
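When conditionalMerge is true, the merge carries a ReplaceIfEquals update: the four-argument AttributeUpdate constructor takes the new value plus the value the attribute is expected to currently hold, so the merge fails if the expectation does not hold. A short sketch of building such a collection, reusing only the constructs visible above and assuming the same io.pravega.segmentstore.contracts imports (the transaction name and values are illustrative):
String transactionName = "transaction-0";          // illustrative name
AttributeId id = AttributeId.fromUUID(UUID.nameUUIDFromBytes(transactionName.getBytes()));
long expectedValue = transactionName.hashCode();   // value the attribute must currently hold
long newValue = expectedValue + 1;                 // value installed only if the comparison succeeds
AttributeUpdateCollection conditionalUpdates = AttributeUpdateCollection.from(
        new AttributeUpdate(id, AttributeUpdateType.ReplaceIfEquals, newValue, expectedValue));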
use of io.pravega.segmentstore.contracts.AttributeUpdateCollection in project pravega by pravega.
the class ContainerMetadataUpdateTransactionTests method testWithAttributesByReference.
private void testWithAttributesByReference(Function<AttributeUpdateCollection, Operation> createOperation) throws Exception {
    final AttributeId referenceAttributeId = AttributeId.randomUUID();
    final AttributeId attributeSegmentLength = AttributeId.randomUUID();
    final long initialAttributeValue = 1234567;
    UpdateableContainerMetadata metadata = createMetadata();
    metadata.getStreamSegmentMetadata(SEGMENT_ID).updateAttributes(ImmutableMap.of(referenceAttributeId, initialAttributeValue));
    val txn = createUpdateTransaction(metadata);
    // Update #1.
    AttributeUpdateCollection attributeUpdates = AttributeUpdateCollection.from(
            new AttributeUpdate(referenceAttributeId, AttributeUpdateType.Accumulate, 2),
            new DynamicAttributeUpdate(attributeSegmentLength, AttributeUpdateType.None, DynamicAttributeValue.segmentLength(5)));
    Map<AttributeId, Long> expectedValues = ImmutableMap.of(
            Attributes.ATTRIBUTE_SEGMENT_TYPE, DEFAULT_TYPE.getValue(),
            referenceAttributeId, initialAttributeValue + 2,
            attributeSegmentLength, SEGMENT_LENGTH + 5);
    Operation op = createOperation.apply(attributeUpdates);
    txn.preProcessOperation(op);
    txn.acceptOperation(op);
    // Verify result.
    verifyAttributeUpdates("after acceptOperation", txn, attributeUpdates, expectedValues);
    txn.commit(metadata);
    SegmentMetadataComparer.assertSameAttributes("Unexpected attributes in segment metadata after final commit.", expectedValues, metadata.getStreamSegmentMetadata(SEGMENT_ID));
}
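The notable part of this test is combining a plain Accumulate update with a DynamicAttributeUpdate whose value is only resolved while the operation is pre-processed: DynamicAttributeValue.segmentLength(5) evaluates to the segment's length plus 5 at that moment, which is why the expected values above are initialAttributeValue + 2 and SEGMENT_LENGTH + 5. A brief sketch of the expected arithmetic with illustrative numbers (the segment length below is an assumption, not the test's SEGMENT_LENGTH constant):
long initialAttributeValue = 1234567;          // value seeded into the segment metadata
long segmentLength = 100;                      // stand-in for SEGMENT_LENGTH (illustrative)
long accumulated = initialAttributeValue + 2;  // Accumulate adds the update's value to the current one -> 1234569
long dynamicValue = segmentLength + 5;         // DynamicAttributeValue.segmentLength(5) resolves to length + 5 -> 105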
use of io.pravega.segmentstore.contracts.AttributeUpdateCollection in project pravega by pravega.
the class StreamSegmentContainerTests method testSegmentRegularOperations.
/**
 * Tests the createSegment, append, updateAttributes, read, getSegmentInfo and getActiveSegments methods.
 */
@Test
public void testSegmentRegularOperations() throws Exception {
    final AttributeId attributeAccumulate = AttributeId.randomUUID();
    final AttributeId attributeReplace = AttributeId.randomUUID();
    final AttributeId attributeReplaceIfGreater = AttributeId.randomUUID();
    final AttributeId attributeReplaceIfEquals = AttributeId.randomUUID();
    final AttributeId attributeNoUpdate = AttributeId.randomUUID();
    final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    @Cleanup
    TestContext context = createContext();
    context.container.startAsync().awaitRunning();
    // 1. Create the StreamSegments.
    ArrayList<String> segmentNames = createSegments(context);
    checkActiveSegments(context.container, 0);
    activateAllSegments(segmentNames, context);
    checkActiveSegments(context.container, segmentNames.size());
    // 2. Add some appends.
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    ArrayList<RefCountByteArraySegment> appends = new ArrayList<>();
    HashMap<String, Long> lengths = new HashMap<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
        for (String segmentName : segmentNames) {
            val attributeUpdates = new AttributeUpdateCollection();
            attributeUpdates.add(new AttributeUpdate(attributeAccumulate, AttributeUpdateType.Accumulate, 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, i + 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplaceIfGreater, AttributeUpdateType.ReplaceIfGreater, i + 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplaceIfEquals, i == 0 ? AttributeUpdateType.Replace : AttributeUpdateType.ReplaceIfEquals, i + 1, i));
            RefCountByteArraySegment appendData = getAppendData(segmentName, i);
            long expectedLength = lengths.getOrDefault(segmentName, 0L) + appendData.getLength();
            val append = (i % 2 == 0)
                    ? context.container.append(segmentName, appendData, attributeUpdates, TIMEOUT)
                    : context.container.append(segmentName, lengths.get(segmentName), appendData, attributeUpdates, TIMEOUT);
            opFutures.add(append.thenApply(length -> {
                assertEquals(expectedLength, length.longValue());
                return null;
            }));
            lengths.put(segmentName, expectedLength);
            recordAppend(segmentName, appendData, segmentContents, appends);
        }
    }
    // 2.1 Update some of the attributes.
    for (String segmentName : segmentNames) {
        // Record a one-off update.
        opFutures.add(context.container.updateAttributes(
                segmentName,
                AttributeUpdateCollection.from(new AttributeUpdate(attributeNoUpdate, AttributeUpdateType.None, expectedAttributeValue)),
                TIMEOUT));
        for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
            val attributeUpdates = new AttributeUpdateCollection();
            attributeUpdates.add(new AttributeUpdate(attributeAccumulate, AttributeUpdateType.Accumulate, 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, APPENDS_PER_SEGMENT + i + 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplaceIfGreater, AttributeUpdateType.ReplaceIfGreater, APPENDS_PER_SEGMENT + i + 1));
            attributeUpdates.add(new AttributeUpdate(attributeReplaceIfEquals, AttributeUpdateType.ReplaceIfEquals, APPENDS_PER_SEGMENT + i + 1, APPENDS_PER_SEGMENT + i));
            opFutures.add(context.container.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
        }
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // 3. getSegmentInfo
    for (String segmentName : segmentNames) {
        SegmentProperties sp = context.container.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        long expectedLength = lengths.get(segmentName);
        Assert.assertEquals("Unexpected StartOffset for non-truncated segment " + segmentName, 0, sp.getStartOffset());
        Assert.assertEquals("Unexpected length for segment " + segmentName, expectedLength, sp.getLength());
        Assert.assertFalse("Unexpected value for isDeleted for segment " + segmentName, sp.isDeleted());
Assert.assertFalse("Unexpected value for isSealed for segment " + segmentName, sp.isDeleted());
// Verify all attribute values.
Assert.assertEquals("Unexpected value for attribute " + attributeAccumulate + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeNoUpdate, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeAccumulate + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeAccumulate, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplace + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplace, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplaceIfGreater + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplaceIfGreater, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeReplaceIfEquals + " for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeReplaceIfEquals, Attributes.NULL_ATTRIBUTE_VALUE));
val expectedType = getSegmentType(segmentName);
val actualType = SegmentType.fromAttributes(sp.getAttributes());
Assert.assertEquals("Unexpected Segment Type.", expectedType, actualType);
}
checkActiveSegments(context.container, segmentNames.size());
// 4. Reads (regular reads, not tail reads).
checkReadIndex(segmentContents, lengths, context);
// 4.1. After we ensured that all data has been ingested and processed, verify that all data buffers have been released.
checkAppendLeaks(appends);
// 5. Writer moving data to Storage.
waitForSegmentsInStorage(segmentNames, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
checkStorage(segmentContents, lengths, context);
context.container.stopAsync().awaitTerminated();
}
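The assertions above pin down the semantics of the update types used throughout this page: Accumulate adds the supplied value to the current one, Replace overwrites it unconditionally, ReplaceIfGreater requires the new value to be larger than the current one, ReplaceIfEquals requires the current value to equal the supplied comparison value, and the one-off attributeNoUpdate write uses None. A compact sketch combining them in a single collection, assuming the same io.pravega.segmentstore.contracts imports as the snippet above (attribute ids and values are illustrative):
AttributeId attributeAccumulate = AttributeId.randomUUID();
AttributeId attributeReplace = AttributeId.randomUUID();
AttributeId attributeReplaceIfGreater = AttributeId.randomUUID();
AttributeId attributeReplaceIfEquals = AttributeId.randomUUID();
AttributeUpdateCollection updates = new AttributeUpdateCollection();
updates.add(new AttributeUpdate(attributeAccumulate, AttributeUpdateType.Accumulate, 1));                 // current value + 1
updates.add(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, 10));                      // current value = 10
updates.add(new AttributeUpdate(attributeReplaceIfGreater, AttributeUpdateType.ReplaceIfGreater, 11));    // requires 11 > current value
updates.add(new AttributeUpdate(attributeReplaceIfEquals, AttributeUpdateType.ReplaceIfEquals, 12, 11));  // requires current value == 11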