Use of io.pravega.segmentstore.contracts.AttributeUpdate in project pravega by pravega.
From the class StreamSegmentContainerTests, method testForSegment.
/**
 * Tests the forSegment() method. We test this here vs in StreamSegmentContainerTests because we want to exercise
 * additional code in StreamSegmentService. This will invoke the StreamSegmentContainer code as well.
 */
@Test
public void testForSegment() {
    AttributeId attributeId1 = AttributeId.randomUUID();
    AttributeId attributeId2 = AttributeId.randomUUID();
    AttributeId attributeId3 = AttributeId.randomUUID();
    @Cleanup val context = createContext();
    context.container.startAsync().awaitRunning();

    // Create the StreamSegments.
    val segmentNames = createSegments(context);

    // Add some appends.
    for (String segmentName : segmentNames) {
        byte[] appendData = ("Append_" + segmentName).getBytes();
        val dsa = context.container.forSegment(segmentName, TIMEOUT).join();
        dsa.append(new ByteArraySegment(appendData), AttributeUpdateCollection.from(new AttributeUpdate(attributeId1, AttributeUpdateType.None, 1L)), TIMEOUT).join();
        dsa.updateAttributes(AttributeUpdateCollection.from(new AttributeUpdate(attributeId2, AttributeUpdateType.None, 2L)), TIMEOUT).join();
        dsa.append(new ByteArraySegment(appendData), AttributeUpdateCollection.from(new AttributeUpdate(attributeId3, AttributeUpdateType.None, 3L)), dsa.getInfo().getLength(), TIMEOUT).join();
        dsa.seal(TIMEOUT).join();
        dsa.truncate(1, TIMEOUT).join();

        // Check metadata.
        val info = dsa.getInfo();
        Assert.assertEquals("Unexpected name.", segmentName, info.getName());
        Assert.assertEquals("Unexpected length.", 2 * appendData.length, info.getLength());
        Assert.assertEquals("Unexpected startOffset.", 1, info.getStartOffset());
        Assert.assertEquals("Unexpected attribute count.", 3, info.getAttributes().keySet().stream().filter(id -> !AUTO_ATTRIBUTES.contains(id)).count());
        Assert.assertEquals("Unexpected attribute 1.", 1L, (long) info.getAttributes().get(attributeId1));
        Assert.assertEquals("Unexpected attribute 2.", 2L, (long) info.getAttributes().get(attributeId2));
        Assert.assertEquals("Unexpected attribute 3.", 3L, (long) info.getAttributes().get(attributeId3));
        Assert.assertTrue("Unexpected isSealed.", info.isSealed());

        // Not expecting any extended attributes in this case, as they are disabled for this segment.
        Assert.assertEquals(-1L, (long) dsa.getExtendedAttributeCount(TIMEOUT).join());

        // Check written data.
        byte[] readBuffer = new byte[appendData.length - 1];
        @Cleanup val readResult = dsa.read(1, readBuffer.length, TIMEOUT);
        val firstEntry = readResult.next();
        firstEntry.requestContent(TIMEOUT);
        val entryContents = firstEntry.getContent().join();
        Assert.assertEquals("Unexpected number of bytes read.", readBuffer.length, entryContents.getLength());
        entryContents.copyTo(ByteBuffer.wrap(readBuffer));
        AssertExtensions.assertArrayEquals("Unexpected data read back.", appendData, 1, readBuffer, 0, readBuffer.length);
    }
}
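For quick reference, here is a minimal, hypothetical sketch (not taken from the Pravega sources) of the pattern exercised above: obtain a direct-access handle for a segment via forSegment() and append a payload together with an AttributeUpdate. The helper name, the SegmentContainer parameter type, and the payload are assumptions; the calls themselves mirror the test.

// Hypothetical helper; assumes the same imports as the test above (io.pravega.segmentstore.contracts.*,
// io.pravega.common.util.ByteArraySegment, lombok.val) and an already-running container.
private void appendWithAttribute(SegmentContainer container, String segmentName, Duration timeout) {
    // Obtain direct access to the segment; subsequent calls go straight to it.
    val dsa = container.forSegment(segmentName, timeout).join();

    // An AttributeUpdate pairs an AttributeId with an update type and a value; the tests above use
    // the None, Replace, Accumulate and ReplaceIfEquals update types.
    AttributeId attributeId = AttributeId.randomUUID();
    val updates = AttributeUpdateCollection.from(new AttributeUpdate(attributeId, AttributeUpdateType.None, 1L));

    // Append the payload and apply the attribute update as part of the same operation.
    dsa.append(new ByteArraySegment("payload".getBytes()), updates, timeout).join();
}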
Use of io.pravega.segmentstore.contracts.AttributeUpdate in project pravega by pravega.
From the class StreamSegmentContainerTests, method testForceFlush.
/**
 * Tests the {@link SegmentContainer#flushToStorage} method.
 */
@Test
public void testForceFlush() throws Exception {
    final AttributeId attributeReplace = AttributeId.randomUUID();
    final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    final int entriesPerSegment = 10;
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG, NO_TRUNCATIONS_DURABLE_LOG_CONFIG, INFREQUENT_FLUSH_WRITER_CONFIG, null);
    val durableLog = new AtomicReference<OperationLog>();
    val durableLogFactory = new WatchableOperationLogFactory(context.operationLogFactory, durableLog::set);
    @Cleanup val container = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    container.startAsync().awaitRunning();
    Assert.assertNotNull(durableLog.get());
    val tableStore = container.getExtension(ContainerTableExtension.class);

    // 1. Create the StreamSegments and Table Segments.
    ArrayList<String> segmentNames = new ArrayList<>();
    ArrayList<String> tableSegmentNames = new ArrayList<>();
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    for (int i = 0; i < SEGMENT_COUNT; i++) {
        String segmentName = getSegmentName(i);
        segmentNames.add(segmentName);
        opFutures.add(container.createStreamSegment(segmentName, getSegmentType(segmentName), null, TIMEOUT));
    }
    for (int i = 0; i < SEGMENT_COUNT; i++) {
        String segmentName = getSegmentName(i) + "_Table";
        tableSegmentNames.add(segmentName);
        val type = SegmentType.builder(getSegmentType(segmentName)).tableSegment().build();
        opFutures.add(tableStore.createSegment(segmentName, type, TIMEOUT));
    }

    // 1.1 Wait for all segments to be created prior to using them.
    Futures.allOf(opFutures).join();
    opFutures.clear();

    // 2. Add some appends and update some of the attributes.
    HashMap<String, Long> lengths = new HashMap<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    for (String segmentName : segmentNames) {
        for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
            val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, i + 1));
            val appendData = getAppendData(segmentName, i);
            long expectedLength = lengths.getOrDefault(segmentName, 0L) + appendData.getLength();
            val append = (i % 2 == 0) ? container.append(segmentName, appendData, attributeUpdates, TIMEOUT) : container.append(segmentName, lengths.get(segmentName), appendData, attributeUpdates, TIMEOUT);
            opFutures.add(Futures.toVoid(append));
            lengths.put(segmentName, expectedLength);
            recordAppend(segmentName, appendData, segmentContents, null);
        }
        for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
            val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, APPENDS_PER_SEGMENT + i + 1));
            opFutures.add(container.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
        }
    }

    // 2.2 Add some entries to the table segments.
    final BiFunction<String, Integer, TableEntry> createTableEntry = (segmentName, entryId) -> TableEntry.unversioned(new ByteArraySegment(String.format("Key_%s_%s", segmentName, entryId).getBytes()), new ByteArraySegment(String.format("Value_%s_%s", segmentName, entryId).getBytes()));
    for (String segmentName : tableSegmentNames) {
        for (int i = 0; i < entriesPerSegment; i++) {
            opFutures.add(Futures.toVoid(tableStore.put(segmentName, Collections.singletonList(createTableEntry.apply(segmentName, i)), TIMEOUT)));
        }
    }
    Futures.allOf(opFutures).join();

    // 3. Instead of waiting for the Writer to move data to Storage, we invoke flushToStorage to verify that all
    // operations have been applied to Storage.
    val forceFlush = container.flushToStorage(TIMEOUT);
    forceFlush.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    checkStorage(segmentContents, lengths, container, context.storage);

    // 4. Truncate all the data in the DurableLog and immediately shut down the container.
    val truncateSeqNo = container.metadata.getClosestValidTruncationPoint(container.metadata.getOperationSequenceNumber());
    durableLog.get().truncate(truncateSeqNo, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    container.close();

    // 5. Create a new container instance (from the nearly empty DurableLog) and with an empty cache.
    @Cleanup val container2 = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    container2.startAsync().awaitRunning();

    // 5.1 Verify Segment Data.
    for (val sc : segmentContents.entrySet()) {
        // Contents.
        byte[] expectedData = sc.getValue().toByteArray();
        byte[] actualData = new byte[expectedData.length];
        container2.read(sc.getKey(), 0, actualData.length, TIMEOUT).join().readRemaining(actualData, TIMEOUT);
        Assert.assertArrayEquals("Unexpected contents for " + sc.getKey(), expectedData, actualData);

        // Length.
        val si = container2.getStreamSegmentInfo(sc.getKey(), TIMEOUT).join();
        Assert.assertEquals("Unexpected length for " + sc.getKey(), expectedData.length, si.getLength());

        // Attributes.
        val attributes = container2.getAttributes(sc.getKey(), Collections.singleton(attributeReplace), false, TIMEOUT).join();
        Assert.assertEquals("Unexpected attribute for " + sc.getKey(), expectedAttributeValue, (long) attributes.get(attributeReplace));
    }

    // 5.2 Verify table segment data.
    val tableStore2 = container2.getExtension(ContainerTableExtension.class);
    for (String segmentName : tableSegmentNames) {
        for (int i = 0; i < entriesPerSegment; i++) {
            val expected = createTableEntry.apply(segmentName, i);
            val actual = tableStore2.get(segmentName, Collections.singletonList(expected.getKey().getKey()), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(0);
            Assert.assertTrue("Unexpected Table Entry for " + segmentName + " at position " + i, expected.getKey().getKey().equals(actual.getKey().getKey()) && expected.getValue().equals(actual.getValue()));
        }
    }

    // Ending Note: if all the above tests passed, we have implicitly validated that the Container Metadata Segment has also
    // been properly flushed.
}
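A minimal, hypothetical sketch (not from the Pravega sources) of the force-flush step above: append with a Replace-type AttributeUpdate, then call flushToStorage() so that pending operations reach Storage without waiting for the Writer's regular flush cycle. The helper name, payload, and parameter types are assumptions; the calls mirror steps 2 and 3 of the test.

// Hypothetical helper; assumes the same imports and a running container as in testForceFlush() above.
private void appendThenForceFlush(SegmentContainer container, String segmentName, Duration timeout) throws Exception {
    AttributeId attributeId = AttributeId.randomUUID();
    val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeId, AttributeUpdateType.Replace, 1L));

    // Append data together with a Replace-type attribute update.
    container.append(segmentName, new ByteArraySegment("data".getBytes()), attributeUpdates, timeout).join();

    // flushToStorage() completes once the container has applied its outstanding operations to Storage.
    container.flushToStorage(timeout).get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}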
Use of io.pravega.segmentstore.contracts.AttributeUpdate in project pravega by pravega.
From the class StreamSegmentContainerTests, method testExtendedAttributesConditionalUpdates.
/**
 * Test conditional updates for Extended Attributes when they are not loaded in memory (i.e., they will need to be
 * auto-fetched from the AttributeIndex so that the operation may succeed).
 */
@Test
public void testExtendedAttributesConditionalUpdates() throws Exception {
    final AttributeId ea1 = AttributeId.uuid(0, 1);
    final AttributeId ea2 = AttributeId.uuid(0, 2);
    final List<AttributeId> allAttributes = Stream.of(ea1, ea2).collect(Collectors.toList());

    // We set a longer Segment Expiration time, since this test executes more operations than the others.
    final TestContainerConfig containerConfig = new TestContainerConfig();
    containerConfig.setSegmentMetadataExpiration(Duration.ofMillis(EVICTION_SEGMENT_EXPIRATION_MILLIS_LONG));
    AtomicInteger expectedAttributeValue = new AtomicInteger(0);
    @Cleanup TestContext context = createContext();
    OperationLogFactory localDurableLogFactory = new DurableLogFactory(FREQUENT_TRUNCATIONS_DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
    @Cleanup MetadataCleanupContainer localContainer = new MetadataCleanupContainer(CONTAINER_ID, containerConfig, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    localContainer.startAsync().awaitRunning();

    // 1. Create the StreamSegments and set the initial attribute values.
    ArrayList<String> segmentNames = createSegments(localContainer);
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();

    // 2. Update some of the attributes.
    expectedAttributeValue.set(1);
    for (String segmentName : segmentNames) {
        AttributeUpdateCollection attributeUpdates = allAttributes.stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
        opFutures.add(localContainer.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);

    // 3. Force these segments out of memory, so that we may verify conditional appends/updates on extended attributes.
    localContainer.triggerMetadataCleanup(segmentNames).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);

    // 4. Execute various conditional operations using these attributes as comparison.
    long compare = expectedAttributeValue.getAndIncrement();
    long set = expectedAttributeValue.get();
    boolean badUpdate = false;
    for (String segmentName : segmentNames) {
        if (badUpdate) {
            // For every other segment, try to do a bad update, then a good one. This helps us verify direct
            // conditional operations, failed conditional operations, and conditional operations with cached attributes.
            AssertExtensions.assertSuppliedFutureThrows("Conditional append succeeded with incorrect compare value.", () -> localContainer.append(segmentName, getAppendData(segmentName, 0), AttributeUpdateCollection.from(new AttributeUpdate(ea1, AttributeUpdateType.ReplaceIfEquals, set, compare - 1)), TIMEOUT), ex -> (ex instanceof BadAttributeUpdateException) && !((BadAttributeUpdateException) ex).isPreviousValueMissing());
            AssertExtensions.assertSuppliedFutureThrows("Conditional update-attributes succeeded with incorrect compare value.", () -> localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(ea2, AttributeUpdateType.ReplaceIfEquals, set, compare - 1)), TIMEOUT), ex -> (ex instanceof BadAttributeUpdateException) && !((BadAttributeUpdateException) ex).isPreviousValueMissing());
        }
        opFutures.add(Futures.toVoid(localContainer.append(segmentName, getAppendData(segmentName, 0), AttributeUpdateCollection.from(new AttributeUpdate(ea1, AttributeUpdateType.ReplaceIfEquals, set, compare)), TIMEOUT)));
        opFutures.add(localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(ea2, AttributeUpdateType.ReplaceIfEquals, set, compare)), TIMEOUT));
        badUpdate = !badUpdate;
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // 5. Evict the segments from memory, then verify results.
    localContainer.triggerMetadataCleanup(segmentNames).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    for (String segmentName : segmentNames) {
        // Verify all attribute values.
        val attributeValues = localContainer.getAttributes(segmentName, allAttributes, true, TIMEOUT).join();
        val sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        for (val attributeId : allAttributes) {
            Assert.assertEquals("Unexpected value for non-cached attribute " + attributeId + " for segment " + segmentName, expectedAttributeValue.get(), (long) attributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            Assert.assertEquals("Unexpected value for metadata attribute " + attributeId + " for segment " + segmentName, expectedAttributeValue.get(), (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
        }
    }
    localContainer.stopAsync().awaitTerminated();
}
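The conditional pattern used above can be distilled into a small, hypothetical helper (not from the Pravega sources): an AttributeUpdate with type ReplaceIfEquals carries both the new value and the expected current value, and the operation fails with BadAttributeUpdateException if the comparison does not hold, even when the extended attribute has to be auto-fetched from the Attribute Index. The helper name and parameter types are assumptions.

// Hypothetical compare-and-set helper; assumes the same imports as the test above.
private CompletableFuture<Void> compareAndSetAttribute(SegmentContainer container, String segmentName,
                                                       AttributeId attributeId, long expectedValue, long newValue,
                                                       Duration timeout) {
    // ReplaceIfEquals takes the new value followed by the value to compare against.
    val update = AttributeUpdateCollection.from(
            new AttributeUpdate(attributeId, AttributeUpdateType.ReplaceIfEquals, newValue, expectedValue));

    // The returned future fails with BadAttributeUpdateException if the attribute does not currently hold expectedValue.
    return container.updateAttributes(segmentName, update, timeout);
}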
Use of io.pravega.segmentstore.contracts.AttributeUpdate in project pravega by pravega.
From the class StreamSegmentContainerTests, method testConditionalTransactionOperationsWithWrongAttributes.
/**
 * Tests the createTransaction, append-to-Transaction, and mergeTransaction methods with invalid attribute updates.
 */
@Test
public void testConditionalTransactionOperationsWithWrongAttributes() throws Exception {
    @Cleanup TestContext context = createContext();
    context.container.startAsync().awaitRunning();

    // 1. Create the StreamSegments.
    ArrayList<String> segmentNames = createSegments(context);
    HashMap<String, ArrayList<String>> transactionsBySegment = createTransactions(segmentNames, context);
    activateAllSegments(segmentNames, context);
    transactionsBySegment.values().forEach(s -> activateAllSegments(s, context));

    // 2. Add some appends.
    HashMap<String, Long> lengths = new HashMap<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    appendToParentsAndTransactions(segmentNames, transactionsBySegment, lengths, segmentContents, context);

    // 3. Wrongly update an attribute on the parent Segments. We update the attributes with a wrong value to
    // validate that Segments do not get merged when attribute updates fail.
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    for (Map.Entry<String, ArrayList<String>> e : transactionsBySegment.entrySet()) {
        String parentName = e.getKey();
        for (String transactionName : e.getValue()) {
            opFutures.add(context.container.updateAttributes(parentName, AttributeUpdateCollection.from(new AttributeUpdate(AttributeId.fromUUID(UUID.nameUUIDFromBytes(transactionName.getBytes())), AttributeUpdateType.None, 0)), TIMEOUT));
        }
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);

    // 4. Merge all the Transactions and expect this to fail.
    for (CompletableFuture<Void> mergeTransaction : mergeTransactions(transactionsBySegment, lengths, segmentContents, context, true)) {
        AssertExtensions.assertMayThrow("If the transaction merge fails, it should be due to BadAttributeUpdateException", () -> mergeTransaction, ex -> ex instanceof BadAttributeUpdateException);
    }
    context.container.stopAsync().awaitTerminated();
}
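As a companion sketch (hypothetical, not from the Pravega sources), this is the "poisoning" step used above: the parent segment's attribute keyed by the transaction name is set to a wrong value, so that the later conditional merge comparing against that attribute is expected to fail with BadAttributeUpdateException. The helper name and parameter types are assumptions; the attribute-id derivation mirrors step 3 of the test.

// Hypothetical helper mirroring step 3 above; assumes the same imports as the test.
private CompletableFuture<Void> poisonMergeAttribute(SegmentContainer container, String parentName,
                                                     String transactionName, Duration timeout) {
    // The attribute used for the conditional merge is keyed off the transaction name.
    AttributeId mergeAttributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(transactionName.getBytes()));

    // Deliberately store a value the merge will not expect.
    val wrongValue = AttributeUpdateCollection.from(new AttributeUpdate(mergeAttributeId, AttributeUpdateType.None, 0));
    return container.updateAttributes(parentName, wrongValue, timeout);
}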
Use of io.pravega.segmentstore.contracts.AttributeUpdate in project pravega by pravega.
From the class StreamSegmentContainerTests, method testAttributes.
/**
 * Tests the ability to set attributes (via append() or updateAttributes()), then fetch them back using getAttributes(),
 * with an emphasis on Extended Attributes that are dumped into Storage and cleared from memory.
 */
@Test
public void testAttributes() throws Exception {
    final AttributeId coreAttribute = Attributes.EVENT_COUNT;
    final int variableAttributeIdLength = 4;
    final List<AttributeId> extendedAttributesUUID = Arrays.asList(AttributeId.randomUUID(), AttributeId.randomUUID());
    final List<AttributeId> extendedAttributesVariable = Arrays.asList(AttributeId.random(variableAttributeIdLength), AttributeId.random(variableAttributeIdLength));
    final List<AttributeId> allAttributesWithUUID = Stream.concat(extendedAttributesUUID.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
    final List<AttributeId> allAttributesWithVariable = Stream.concat(extendedAttributesVariable.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
    final AttributeId segmentLengthAttributeUUID = AttributeId.randomUUID();
    final AttributeId segmentLengthAttributeVariable = AttributeId.random(variableAttributeIdLength);
    final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    final TestContainerConfig containerConfig = new TestContainerConfig();
    containerConfig.setSegmentMetadataExpiration(Duration.ofMillis(EVICTION_SEGMENT_EXPIRATION_MILLIS_SHORT));
    containerConfig.setMaxCachedExtendedAttributeCount(SEGMENT_COUNT * allAttributesWithUUID.size());
    @Cleanup TestContext context = createContext();
    OperationLogFactory localDurableLogFactory = new DurableLogFactory(FREQUENT_TRUNCATIONS_DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
    @Cleanup MetadataCleanupContainer localContainer = new MetadataCleanupContainer(CONTAINER_ID, containerConfig, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
    localContainer.startAsync().awaitRunning();

    // 1. Create the StreamSegments.
    val segmentNames = IntStream.range(0, SEGMENT_COUNT).boxed().collect(Collectors.toMap(StreamSegmentContainerTests::getSegmentName, i -> i % 2 == 0 ? variableAttributeIdLength : 0));
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    for (val sn : segmentNames.entrySet()) {
        opFutures.add(localContainer.createStreamSegment(sn.getKey(), SegmentType.STREAM_SEGMENT, AttributeUpdateCollection.from(new AttributeUpdate(Attributes.ATTRIBUTE_ID_LENGTH, AttributeUpdateType.None, sn.getValue())), TIMEOUT));
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    Predicate<Map.Entry<String, Integer>> isUUIDOnly = e -> e.getValue() == 0;

    // 2. Add some appends.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
            AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
            opFutures.add(Futures.toVoid(localContainer.append(sn.getKey(), getAppendData(sn.getKey(), i), attributeUpdates, TIMEOUT)));
        }
    }

    // 2.1 Update some of the attributes.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
            AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
            opFutures.add(localContainer.updateAttributes(sn.getKey(), attributeUpdates, TIMEOUT));
        }

        // Verify that we are not allowed to update attributes of the wrong type.
        val badUpdate = new AttributeUpdate(isUUID ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID(), AttributeUpdateType.Accumulate, 1);
        AssertExtensions.assertSuppliedFutureThrows("updateAttributes allowed updating attributes with wrong type and/or length.", () -> localContainer.updateAttributes(sn.getKey(), AttributeUpdateCollection.from(badUpdate), TIMEOUT), ex -> ex instanceof AttributeIdLengthMismatchException);
    }
    Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);

    // 2.2 Dynamic attributes.
    for (val sn : segmentNames.entrySet()) {
        boolean isUUID = isUUIDOnly.test(sn);
        val dynamicId = isUUID ? segmentLengthAttributeUUID : segmentLengthAttributeVariable;
        val dynamicAttributes = AttributeUpdateCollection.from(new DynamicAttributeUpdate(dynamicId, AttributeUpdateType.Replace, DynamicAttributeValue.segmentLength(10)));
        val appendData = getAppendData(sn.getKey(), 1000);
        val lastOffset = localContainer.append(sn.getKey(), appendData, dynamicAttributes, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        val expectedValue = lastOffset - appendData.getLength() + 10;
        Assert.assertEquals(expectedValue, (long) localContainer.getAttributes(sn.getKey(), Collections.singleton(dynamicId), false, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(dynamicId));
    }

    // 3. getSegmentInfo
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
        val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
        Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes().", allAttributes.size(), allAttributeValues.size());

        // Verify all attribute values.
        SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        for (val attributeId : allAttributes) {
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getInfo() for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
        }

        // Verify we can't request wrong lengths/types.
        val badId = isUUIDOnly.test(sn) ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID();
        AssertExtensions.assertSuppliedFutureThrows("getAttributes allowed getting attributes with wrong type and/or length.", () -> localContainer.getAttributes(segmentName, Collections.singleton(badId), true, TIMEOUT), ex -> ex instanceof IllegalArgumentException);
    }

    // Force these segments out of memory, so that we may verify that extended attributes are still recoverable.
    localContainer.triggerMetadataCleanup(segmentNames.keySet()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
        val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
        Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes() after recovery for segment " + segmentName, allAttributes.size(), allAttributeValues.size());

        // Verify all attribute values. Core attributes should still be loaded in memory, while extended attributes can
        // only be fetched via their special API.
        SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        for (val attributeId : allAttributes) {
            Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() after recovery for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            if (Attributes.isCoreAttribute(attributeId)) {
                Assert.assertEquals("Expecting core attribute to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            } else {
                Assert.assertEquals("Not expecting extended attribute to be loaded in memory.", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            }
        }

        // Now instruct the Container to cache missing values (do it a few times so we make sure it's idempotent).
        // Also introduce some random new attribute to fetch. We want to make sure we can properly handle caching
        // missing attribute values.
        val missingAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);
        val attributesToCache = new ArrayList<>(allAttributes);
        attributesToCache.add(missingAttributeId);
        val attributesToCacheValues = new HashMap<>(allAttributeValues);
        attributesToCacheValues.put(missingAttributeId, Attributes.NULL_ATTRIBUTE_VALUE);
        Map<AttributeId, Long> allAttributeValuesWithCache;
        for (int i = 0; i < 2; i++) {
            allAttributeValuesWithCache = localContainer.getAttributes(segmentName, attributesToCache, true, TIMEOUT).join();
            AssertExtensions.assertMapEquals("Inconsistent results from getAttributes(cache=true, attempt=" + i + ").", attributesToCacheValues, allAttributeValuesWithCache);
            sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
            for (val attributeId : allAttributes) {
                Assert.assertEquals("Expecting all attributes to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
            }
            Assert.assertEquals("Unexpected value for missing Attribute Id", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().get(missingAttributeId));
        }
    }

    // 4. Make an update, then immediately seal the segment, then verify the update updated the root pointer.
    AttributeId attr = Attributes.ATTRIBUTE_SEGMENT_ROOT_POINTER;
    val oldRootPointers = new HashMap<String, Long>();
    for (val sn : segmentNames.entrySet()) {
        val segmentName = sn.getKey();
        val newAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);

        // Get the old root pointer, then make a random attribute update, then immediately seal the segment.
        localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).thenCompose(values -> {
            oldRootPointers.put(segmentName, values.get(attr));
            return CompletableFuture.allOf(localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(newAttributeId, AttributeUpdateType.Replace, 1L)), TIMEOUT), localContainer.sealStreamSegment(segmentName, TIMEOUT));
        }).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    }

    // Verify that the root pointer for each segment eventually advances past its previous value,
    // which indicates the StorageWriter was able to successfully record it after its final Attribute Index update.
    for (String segmentName : segmentNames.keySet()) {
        Long oldValue = oldRootPointers.get(segmentName);
        TestUtils.await(() -> {
            val newVal = localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).join().get(attr);
            return oldValue < newVal;
        }, 10, TIMEOUT.toMillis());
    }
    waitForSegmentsInStorage(segmentNames.keySet(), localContainer, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    localContainer.stopAsync().awaitTerminated();
}
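Finally, a minimal, hypothetical sketch (not from the Pravega sources) of the dynamic-attribute usage from section 2.2 above: a DynamicAttributeUpdate computes its value when the append is processed, here as the segment length plus an offset of 10, so the stored value ends up being the append's starting offset plus 10. The helper name and parameter types are assumptions; the calls mirror the test.

// Hypothetical helper mirroring section 2.2 of testAttributes() above; assumes the same imports as the test.
private long appendWithSegmentLengthAttribute(SegmentContainer container, String segmentName,
                                              AttributeId dynamicId, ByteArraySegment data,
                                              Duration timeout) throws Exception {
    // The attribute value is evaluated when the append is processed: the segment length at that point, plus 10.
    val dynamicAttributes = AttributeUpdateCollection.from(
            new DynamicAttributeUpdate(dynamicId, AttributeUpdateType.Replace, DynamicAttributeValue.segmentLength(10)));
    container.append(segmentName, data, dynamicAttributes, timeout).get(timeout.toMillis(), TimeUnit.MILLISECONDS);

    // Read the attribute back without forcing it into the metadata cache (cache = false).
    return container.getAttributes(segmentName, Collections.singleton(dynamicId), false, timeout)
            .get(timeout.toMillis(), TimeUnit.MILLISECONDS)
            .get(dynamicId);
}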