Use of org.junit.Assert.assertEquals in project pravega by pravega.
In class StreamSegmentContainerTests, method testBasicConditionalMergeScenarios.
/**
* Test in detail the basic situations that a conditional segment merge can face.
*/
@Test
public void testBasicConditionalMergeScenarios() throws Exception {
@Cleanup TestContext context = createContext();
context.container.startAsync().awaitRunning();
final String parentSegment = "parentSegment";
// This will be the attribute update to execute against the parent segment.
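// The update is conditional (ReplaceIfEquals): the merge carrying it can only succeed if the attribute's current
// value on the parent segment equals txnName.hashCode(), in which case it is replaced with txnName.hashCode() + 1.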
Function<String, AttributeUpdateCollection> attributeUpdateForTxn = txnName -> AttributeUpdateCollection.from(new AttributeUpdate(AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes())), AttributeUpdateType.ReplaceIfEquals, txnName.hashCode() + 1, txnName.hashCode()));
Function<String, Long> getAttributeValue = txnName -> {
AttributeId attributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
return context.container.getAttributes(parentSegment, Collections.singletonList(attributeId), true, TIMEOUT).join().get(attributeId);
};
// Create a parent Segment.
context.container.createStreamSegment(parentSegment, getSegmentType(parentSegment), null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
SegmentType segmentType = getSegmentType(parentSegment);
// Case 1: Create an empty transaction that fails to merge conditionally due to bad attributes.
String txnName = NameUtils.getTransactionNameFromId(parentSegment, UUID.randomUUID());
AttributeId txnAttributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
context.container.createStreamSegment(txnName, segmentType, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
AttributeUpdateCollection attributeUpdates = attributeUpdateForTxn.apply(txnName);
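// The parent segment has no value yet for txnAttributeId, so the ReplaceIfEquals comparison in this update cannot
// succeed: the merge must fail and the attribute must remain unset.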
AssertExtensions.assertFutureThrows("Transaction was expected to fail on attribute update", context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT), ex -> ex instanceof BadAttributeUpdateException);
Assert.assertEquals(Attributes.NULL_ATTRIBUTE_VALUE, (long) getAttributeValue.apply(txnName));
// Case 2: Now, we prepare the attributes in the parent segment so the merge of the empty transaction succeeds.
context.container.updateAttributes(parentSegment, AttributeUpdateCollection.from(new AttributeUpdate(txnAttributeId, AttributeUpdateType.Replace, txnName.hashCode())), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// As the source segment is empty, the amount of merged data should be 0.
Assert.assertEquals(0L, context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).getMergedDataLength());
// But the attribute related to that transaction merge on the parent segment should have been updated.
Assert.assertEquals(txnName.hashCode() + 1L, (long) context.container.getAttributes(parentSegment, Collections.singletonList(txnAttributeId), true, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(txnAttributeId));
// Case 3: Create a non-empty transaction that should fail due to a conditional attribute update failure.
txnName = NameUtils.getTransactionNameFromId(parentSegment, UUID.randomUUID());
txnAttributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
attributeUpdates = attributeUpdateForTxn.apply(txnName);
context.container.createStreamSegment(txnName, segmentType, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// Add some appends to the transaction.
RefCountByteArraySegment appendData = getAppendData(txnName, 1);
context.container.append(txnName, appendData, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// Attempt the conditional merge.
AssertExtensions.assertFutureThrows("Transaction was expected to fail on attribute update", context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT), ex -> ex instanceof BadAttributeUpdateException);
Assert.assertEquals(Attributes.NULL_ATTRIBUTE_VALUE, (long) getAttributeValue.apply(txnName));
// Case 4: Now, we prepare the attributes in the parent segment so the merge of the non-empty transaction succeeds.
context.container.updateAttributes(parentSegment, AttributeUpdateCollection.from(new AttributeUpdate(txnAttributeId, AttributeUpdateType.Replace, txnName.hashCode())), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// As the source segment is non-empty, the amount of merged data should be greater than 0.
Assert.assertTrue(context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).getMergedDataLength() > 0);
// The attribute related to that transaction merge on the parent segment should have been updated as well.
Assert.assertEquals(txnName.hashCode() + 1L, (long) context.container.getAttributes(parentSegment, Collections.singletonList(txnAttributeId), true, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(txnAttributeId));
context.container.stopAsync().awaitTerminated();
}
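As a side note, the conditional-merge pattern exercised above reduces to the following sketch (not part of the test; sourceSegment, newValue and expectedValue are placeholder names, and container/TIMEOUT are assumed to be a running SegmentContainer and a timeout as in the test): the merge is applied only if the parent's attribute currently equals expectedValue.
AttributeId txnAttrId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(sourceSegment.getBytes()));
// ReplaceIfEquals: write newValue only if the current value equals expectedValue; otherwise the whole merge fails with BadAttributeUpdateException.
AttributeUpdateCollection condition = AttributeUpdateCollection.from(new AttributeUpdate(txnAttrId, AttributeUpdateType.ReplaceIfEquals, newValue, expectedValue));
container.mergeStreamSegment(parentSegment, sourceSegment, condition, TIMEOUT).thenAccept(result -> System.out.println("Merged bytes: " + result.getMergedDataLength()));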
Use of org.junit.Assert.assertEquals in project pravega by pravega.
In class StreamSegmentContainerTests, method testForceFlush.
/**
* Tests the {@link SegmentContainer#flushToStorage} method.
*/
@Test
public void testForceFlush() throws Exception {
final AttributeId attributeReplace = AttributeId.randomUUID();
final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
final int entriesPerSegment = 10;
@Cleanup TestContext context = new TestContext(DEFAULT_CONFIG, NO_TRUNCATIONS_DURABLE_LOG_CONFIG, INFREQUENT_FLUSH_WRITER_CONFIG, null);
val durableLog = new AtomicReference<OperationLog>();
val durableLogFactory = new WatchableOperationLogFactory(context.operationLogFactory, durableLog::set);
@Cleanup val container = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
container.startAsync().awaitRunning();
Assert.assertNotNull(durableLog.get());
val tableStore = container.getExtension(ContainerTableExtension.class);
// 1. Create the StreamSegments and Table Segments.
ArrayList<String> segmentNames = new ArrayList<>();
ArrayList<String> tableSegmentNames = new ArrayList<>();
ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
for (int i = 0; i < SEGMENT_COUNT; i++) {
String segmentName = getSegmentName(i);
segmentNames.add(segmentName);
opFutures.add(container.createStreamSegment(segmentName, getSegmentType(segmentName), null, TIMEOUT));
}
for (int i = 0; i < SEGMENT_COUNT; i++) {
String segmentName = getSegmentName(i) + "_Table";
tableSegmentNames.add(segmentName);
val type = SegmentType.builder(getSegmentType(segmentName)).tableSegment().build();
opFutures.add(tableStore.createSegment(segmentName, type, TIMEOUT));
}
// 1.1 Wait for all segments to be created prior to using them.
Futures.allOf(opFutures).join();
opFutures.clear();
// 2. Add some appends and update some of the attributes.
HashMap<String, Long> lengths = new HashMap<>();
HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
for (String segmentName : segmentNames) {
for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, i + 1));
val appendData = getAppendData(segmentName, i);
long expectedLength = lengths.getOrDefault(segmentName, 0L) + appendData.getLength();
val append = (i % 2 == 0) ? container.append(segmentName, appendData, attributeUpdates, TIMEOUT) : container.append(segmentName, lengths.get(segmentName), appendData, attributeUpdates, TIMEOUT);
opFutures.add(Futures.toVoid(append));
lengths.put(segmentName, expectedLength);
recordAppend(segmentName, appendData, segmentContents, null);
}
for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, APPENDS_PER_SEGMENT + i + 1));
opFutures.add(container.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
}
}
// 2.2 Add some entries to the table segments.
final BiFunction<String, Integer, TableEntry> createTableEntry = (segmentName, entryId) -> TableEntry.unversioned(new ByteArraySegment(String.format("Key_%s_%s", segmentName, entryId).getBytes()), new ByteArraySegment(String.format("Value_%s_%s", segmentName, entryId).getBytes()));
for (String segmentName : tableSegmentNames) {
for (int i = 0; i < entriesPerSegment; i++) {
opFutures.add(Futures.toVoid(tableStore.put(segmentName, Collections.singletonList(createTableEntry.apply(segmentName, i)), TIMEOUT)));
}
}
Futures.allOf(opFutures).join();
// 3. Instead of waiting for the Writer to move data to Storage, we invoke flushToStorage to verify that all
// operations have been applied to Storage.
val forceFlush = container.flushToStorage(TIMEOUT);
forceFlush.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
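// At this point Storage should already contain every append and attribute update issued above, even though the
// Writer itself (configured with INFREQUENT_FLUSH_WRITER_CONFIG) is not expected to have flushed on its own yet.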
checkStorage(segmentContents, lengths, container, context.storage);
// 4. Truncate all the data in the DurableLog and immediately shut down the container.
val truncateSeqNo = container.metadata.getClosestValidTruncationPoint(container.metadata.getOperationSequenceNumber());
durableLog.get().truncate(truncateSeqNo, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
container.close();
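// With the DurableLog truncated, a new container can no longer replay these operations from the log; the recovery
// checks below therefore succeed only if flushToStorage persisted everything.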
// 5. Create a new container instance (from the nearly empty DurableLog) with an empty cache.
@Cleanup val container2 = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
container2.startAsync().awaitRunning();
// 5.1 Verify Segment Data.
for (val sc : segmentContents.entrySet()) {
// Contents.
byte[] expectedData = sc.getValue().toByteArray();
byte[] actualData = new byte[expectedData.length];
container2.read(sc.getKey(), 0, actualData.length, TIMEOUT).join().readRemaining(actualData, TIMEOUT);
Assert.assertArrayEquals("Unexpected contents for " + sc.getKey(), expectedData, actualData);
// Length.
val si = container2.getStreamSegmentInfo(sc.getKey(), TIMEOUT).join();
Assert.assertEquals("Unexpected length for " + sc.getKey(), expectedData.length, si.getLength());
// Attributes.
val attributes = container2.getAttributes(sc.getKey(), Collections.singleton(attributeReplace), false, TIMEOUT).join();
Assert.assertEquals("Unexpected attribute for " + sc.getKey(), expectedAttributeValue, (long) attributes.get(attributeReplace));
}
// 5.2 Verify table segment data.
val tableStore2 = container2.getExtension(ContainerTableExtension.class);
for (String segmentName : tableSegmentNames) {
for (int i = 0; i < entriesPerSegment; i++) {
val expected = createTableEntry.apply(segmentName, i);
val actual = tableStore2.get(segmentName, Collections.singletonList(expected.getKey().getKey()), TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(0);
Assert.assertTrue("Unexpected Table Entry for " + segmentName + " at position " + i, expected.getKey().getKey().equals(actual.getKey().getKey()) && expected.getValue().equals(actual.getValue()));
}
}
// Ending Note: if all the above tests passed, we have implicitly validated that the Container Metadata Segment has also
// been properly flushed.
}
Use of org.junit.Assert.assertEquals in project pravega by pravega.
In class StreamSegmentContainerTests, method testExtendedAttributesConditionalUpdates.
/**
* Test conditional updates for Extended Attributes when they are not loaded in memory (i.e., they will need to be
* auto-fetched from the AttributeIndex so that the operation may succeed).
*/
@Test
public void testExtendedAttributesConditionalUpdates() throws Exception {
final AttributeId ea1 = AttributeId.uuid(0, 1);
final AttributeId ea2 = AttributeId.uuid(0, 2);
final List<AttributeId> allAttributes = Stream.of(ea1, ea2).collect(Collectors.toList());
// We set a longer Segment Expiration time, since this test executes more operations than the others.
final TestContainerConfig containerConfig = new TestContainerConfig();
containerConfig.setSegmentMetadataExpiration(Duration.ofMillis(EVICTION_SEGMENT_EXPIRATION_MILLIS_LONG));
AtomicInteger expectedAttributeValue = new AtomicInteger(0);
@Cleanup TestContext context = createContext();
OperationLogFactory localDurableLogFactory = new DurableLogFactory(FREQUENT_TRUNCATIONS_DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
@Cleanup MetadataCleanupContainer localContainer = new MetadataCleanupContainer(CONTAINER_ID, containerConfig, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
localContainer.startAsync().awaitRunning();
// 1. Create the StreamSegments and set the initial attribute values.
ArrayList<String> segmentNames = createSegments(localContainer);
ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
// 2. Update some of the attributes.
expectedAttributeValue.set(1);
for (String segmentName : segmentNames) {
AttributeUpdateCollection attributeUpdates = allAttributes.stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
opFutures.add(localContainer.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
}
Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// 3. Force these segments out of memory, so that we may verify conditional appends/updates on extended attributes.
localContainer.triggerMetadataCleanup(segmentNames).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// 4. Execute various conditional operations using these attributes as comparison.
long compare = expectedAttributeValue.getAndIncrement();
long set = expectedAttributeValue.get();
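// After step 2 every attribute holds 1, so 'compare' is 1 (the value we expect to find) and 'set' is 2 (the value
// the conditional updates below will write).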
boolean badUpdate = false;
for (String segmentName : segmentNames) {
if (badUpdate) {
// For every other segment, try a bad update first, then a good one. This lets us verify direct
// conditional operations, failed conditional operations, and conditional operations with cached attributes.
AssertExtensions.assertSuppliedFutureThrows("Conditional append succeeded with incorrect compare value.", () -> localContainer.append(segmentName, getAppendData(segmentName, 0), AttributeUpdateCollection.from(new AttributeUpdate(ea1, AttributeUpdateType.ReplaceIfEquals, set, compare - 1)), TIMEOUT), ex -> (ex instanceof BadAttributeUpdateException) && !((BadAttributeUpdateException) ex).isPreviousValueMissing());
AssertExtensions.assertSuppliedFutureThrows("Conditional update-attributes succeeded with incorrect compare value.", () -> localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(ea2, AttributeUpdateType.ReplaceIfEquals, set, compare - 1)), TIMEOUT), ex -> (ex instanceof BadAttributeUpdateException) && !((BadAttributeUpdateException) ex).isPreviousValueMissing());
}
opFutures.add(Futures.toVoid(localContainer.append(segmentName, getAppendData(segmentName, 0), AttributeUpdateCollection.from(new AttributeUpdate(ea1, AttributeUpdateType.ReplaceIfEquals, set, compare)), TIMEOUT)));
opFutures.add(localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(ea2, AttributeUpdateType.ReplaceIfEquals, set, compare)), TIMEOUT));
badUpdate = !badUpdate;
}
Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// 5. Evict the segments from memory again, then verify results.
localContainer.triggerMetadataCleanup(segmentNames).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
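// The second eviction ensures the values verified below are re-fetched from the Attribute Index rather than served
// from still-cached segment metadata.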
for (String segmentName : segmentNames) {
// Verify all attribute values.
val attributeValues = localContainer.getAttributes(segmentName, allAttributes, true, TIMEOUT).join();
val sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
for (val attributeId : allAttributes) {
Assert.assertEquals("Unexpected value for non-cached attribute " + attributeId + " for segment " + segmentName, expectedAttributeValue.get(), (long) attributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for metadata attribute " + attributeId + " for segment " + segmentName, expectedAttributeValue.get(), (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
}
}
localContainer.stopAsync().awaitTerminated();
}
Use of org.junit.Assert.assertEquals in project pravega by pravega.
In class StreamSegmentContainerTests, method testAttributes.
/**
* Tests the ability to set attributes (via append() or updateAttributes()), then fetch them back using getAttributes(),
* emphasizing Extended Attributes that are dumped into Storage and cleared from memory.
*/
@Test
public void testAttributes() throws Exception {
final AttributeId coreAttribute = Attributes.EVENT_COUNT;
final int variableAttributeIdLength = 4;
final List<AttributeId> extendedAttributesUUID = Arrays.asList(AttributeId.randomUUID(), AttributeId.randomUUID());
final List<AttributeId> extendedAttributesVariable = Arrays.asList(AttributeId.random(variableAttributeIdLength), AttributeId.random(variableAttributeIdLength));
final List<AttributeId> allAttributesWithUUID = Stream.concat(extendedAttributesUUID.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
final List<AttributeId> allAttributesWithVariable = Stream.concat(extendedAttributesVariable.stream(), Stream.of(coreAttribute)).collect(Collectors.toList());
final AttributeId segmentLengthAttributeUUID = AttributeId.randomUUID();
final AttributeId segmentLengthAttributeVariable = AttributeId.random(variableAttributeIdLength);
final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
final TestContainerConfig containerConfig = new TestContainerConfig();
containerConfig.setSegmentMetadataExpiration(Duration.ofMillis(EVICTION_SEGMENT_EXPIRATION_MILLIS_SHORT));
containerConfig.setMaxCachedExtendedAttributeCount(SEGMENT_COUNT * allAttributesWithUUID.size());
@Cleanup TestContext context = createContext();
OperationLogFactory localDurableLogFactory = new DurableLogFactory(FREQUENT_TRUNCATIONS_DURABLE_LOG_CONFIG, context.dataLogFactory, executorService());
@Cleanup MetadataCleanupContainer localContainer = new MetadataCleanupContainer(CONTAINER_ID, containerConfig, localDurableLogFactory, context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory, context.getDefaultExtensions(), executorService());
localContainer.startAsync().awaitRunning();
// 1. Create the StreamSegments.
val segmentNames = IntStream.range(0, SEGMENT_COUNT).boxed().collect(Collectors.toMap(StreamSegmentContainerTests::getSegmentName, i -> i % 2 == 0 ? variableAttributeIdLength : 0));
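// Even-indexed segments are mapped to ATTRIBUTE_ID_LENGTH = variableAttributeIdLength (4) and will use
// variable-length attribute ids; odd-indexed segments get 0 and will use UUID attribute ids only.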
ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
for (val sn : segmentNames.entrySet()) {
opFutures.add(localContainer.createStreamSegment(sn.getKey(), SegmentType.STREAM_SEGMENT, AttributeUpdateCollection.from(new AttributeUpdate(Attributes.ATTRIBUTE_ID_LENGTH, AttributeUpdateType.None, sn.getValue())), TIMEOUT));
}
Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
Predicate<Map.Entry<String, Integer>> isUUIDOnly = e -> e.getValue() == 0;
// 2. Add some appends.
for (val sn : segmentNames.entrySet()) {
boolean isUUID = isUUIDOnly.test(sn);
for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
opFutures.add(Futures.toVoid(localContainer.append(sn.getKey(), getAppendData(sn.getKey(), i), attributeUpdates, TIMEOUT)));
}
}
// 2.1 Update some of the attributes.
for (val sn : segmentNames.entrySet()) {
boolean isUUID = isUUIDOnly.test(sn);
for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
AttributeUpdateCollection attributeUpdates = (isUUID ? allAttributesWithUUID : allAttributesWithVariable).stream().map(attributeId -> new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1)).collect(Collectors.toCollection(AttributeUpdateCollection::new));
opFutures.add(localContainer.updateAttributes(sn.getKey(), attributeUpdates, TIMEOUT));
}
// Verify that we are not allowed to update attributes of the wrong type.
val badUpdate = new AttributeUpdate(isUUID ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID(), AttributeUpdateType.Accumulate, 1);
AssertExtensions.assertSuppliedFutureThrows("updateAttributes allowed updating attributes with wrong type and/or length.", () -> localContainer.updateAttributes(sn.getKey(), AttributeUpdateCollection.from(badUpdate), TIMEOUT), ex -> ex instanceof AttributeIdLengthMismatchException);
}
Futures.allOf(opFutures).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
// 2.2 Dynamic attributes.
for (val sn : segmentNames.entrySet()) {
boolean isUUID = isUUIDOnly.test(sn);
val dynamicId = isUUID ? segmentLengthAttributeUUID : segmentLengthAttributeVariable;
val dynamicAttributes = AttributeUpdateCollection.from(new DynamicAttributeUpdate(dynamicId, AttributeUpdateType.Replace, DynamicAttributeValue.segmentLength(10)));
val appendData = getAppendData(sn.getKey(), 1000);
val lastOffset = localContainer.append(sn.getKey(), appendData, dynamicAttributes, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
val expectedValue = lastOffset - appendData.getLength() + 10;
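// DynamicAttributeValue.segmentLength(10) is resolved when the append is processed: the segment's length just
// before this append (its starting offset) plus the 10-byte adjustment, which is what the arithmetic above reflects.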
Assert.assertEquals(expectedValue, (long) localContainer.getAttributes(sn.getKey(), Collections.singleton(dynamicId), false, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(dynamicId));
}
// 3. getSegmentInfo
for (val sn : segmentNames.entrySet()) {
val segmentName = sn.getKey();
val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes().", allAttributes.size(), allAttributeValues.size());
// Verify all attribute values.
SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
for (val attributeId : allAttributes) {
Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getInfo() for segment " + segmentName, expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
}
// Verify we can't request wrong lengths/types.
val badId = isUUIDOnly.test(sn) ? AttributeId.random(variableAttributeIdLength) : AttributeId.randomUUID();
AssertExtensions.assertSuppliedFutureThrows("getAttributes allowed getting attributes with wrong type and/or length.", () -> localContainer.getAttributes(segmentName, Collections.singleton(badId), true, TIMEOUT), ex -> ex instanceof IllegalArgumentException);
}
// Force these segments out of memory, so that we may verify that extended attributes are still recoverable.
localContainer.triggerMetadataCleanup(segmentNames.keySet()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
for (val sn : segmentNames.entrySet()) {
val segmentName = sn.getKey();
val allAttributes = isUUIDOnly.test(sn) ? allAttributesWithUUID : allAttributesWithVariable;
val allAttributeValues = localContainer.getAttributes(segmentName, allAttributes, false, TIMEOUT).join();
Assert.assertEquals("Unexpected number of attributes retrieved via getAttributes() after recovery for segment " + segmentName, allAttributes.size(), allAttributeValues.size());
// Verify all attribute values. Core attributes should still be loaded in memory, while extended attributes can
// only be fetched via their special API.
SegmentProperties sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
for (val attributeId : allAttributes) {
Assert.assertEquals("Unexpected value for attribute " + attributeId + " via getAttributes() after recovery for segment " + segmentName, expectedAttributeValue, (long) allAttributeValues.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
if (Attributes.isCoreAttribute(attributeId)) {
Assert.assertEquals("Expecting core attribute to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
} else {
Assert.assertEquals("Not expecting extended attribute to be loaded in memory.", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
}
}
// Now instruct the Container to cache missing values (do it a few times so we make sure it's idempotent).
// Also introduce some random new attribute to fetch. We want to make sure we can properly handle caching
// missing attribute values.
val missingAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);
val attributesToCache = new ArrayList<>(allAttributes);
attributesToCache.add(missingAttributeId);
val attributesToCacheValues = new HashMap<>(allAttributeValues);
attributesToCacheValues.put(missingAttributeId, Attributes.NULL_ATTRIBUTE_VALUE);
Map<AttributeId, Long> allAttributeValuesWithCache;
for (int i = 0; i < 2; i++) {
allAttributeValuesWithCache = localContainer.getAttributes(segmentName, attributesToCache, true, TIMEOUT).join();
AssertExtensions.assertMapEquals("Inconsistent results from getAttributes(cache=true, attempt=" + i + ").", attributesToCacheValues, allAttributeValuesWithCache);
sp = localContainer.getStreamSegmentInfo(segmentName, TIMEOUT).join();
for (val attributeId : allAttributes) {
Assert.assertEquals("Expecting all attributes to be loaded in memory.", expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
}
Assert.assertEquals("Unexpected value for missing Attribute Id", Attributes.NULL_ATTRIBUTE_VALUE, (long) sp.getAttributes().get(missingAttributeId));
}
}
// 4. Make an update, then immediately seal the segment, then verify the update advanced the root pointer.
AttributeId attr = Attributes.ATTRIBUTE_SEGMENT_ROOT_POINTER;
val oldRootPointers = new HashMap<String, Long>();
for (val sn : segmentNames.entrySet()) {
val segmentName = sn.getKey();
val newAttributeId = isUUIDOnly.test(sn) ? AttributeId.randomUUID() : AttributeId.random(variableAttributeIdLength);
// Get the old root pointer, then make a random attribute update, then immediately seal the segment.
localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).thenCompose(values -> {
oldRootPointers.put(segmentName, values.get(attr));
return CompletableFuture.allOf(localContainer.updateAttributes(segmentName, AttributeUpdateCollection.from(new AttributeUpdate(newAttributeId, AttributeUpdateType.Replace, 1L)), TIMEOUT), localContainer.sealStreamSegment(segmentName, TIMEOUT));
}).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
}
// Verify that each segment's Attribute Segment Root Pointer eventually advances past its pre-seal value,
// which indicates the StorageWriter was able to successfully record it after its final Attribute Index update.
for (String segmentName : segmentNames.keySet()) {
Long oldValue = oldRootPointers.get(segmentName);
TestUtils.await(() -> {
val newVal = localContainer.getAttributes(segmentName, Collections.singleton(attr), false, TIMEOUT).join().get(attr);
return oldValue < newVal;
}, 10, TIMEOUT.toMillis());
}
waitForSegmentsInStorage(segmentNames.keySet(), localContainer, context).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
localContainer.stopAsync().awaitTerminated();
}
Use of org.junit.Assert.assertEquals in project photon-model by vmware.
In class TestAWSClientManagement, method testAWSClientManagementArn.
@Test
public void testAWSClientManagementArn() throws Throwable {
this.ec2ClientReferenceCount = getClientReferenceCount(AwsClientType.EC2);
this.host.setTimeoutSeconds(60);
// Get a reference to the EC2 client manager used in this test.
AWSClientManager ec2ClientManager = getClientManager(AwsClientType.EC2);
ec2ClientManager.cleanUpArnCache();
assertEquals(this.ec2ClientReferenceCount + 1, getClientReferenceCount(AwsClientType.EC2));
this.creds = new AuthCredentialsServiceState();
this.creds.customProperties = new HashMap<>();
this.creds.customProperties.put(ARN_KEY, this.arn);
this.creds.customProperties.put(EXTERNAL_ID_KEY, this.externalId);
TestContext waitContext = new TestContext(1, Duration.ofSeconds(30L));
ec2ClientManager.getOrCreateEC2ClientAsync(this.creds, TestAWSSetupUtils.regionId, this.instanceService).exceptionally(t -> {
waitContext.fail(t);
throw new CompletionException(t);
}).thenAccept(ec2Client -> {
this.client = ec2Client;
waitContext.complete();
});
waitContext.await();
Assert.assertNotNull(this.client);
this.clientCacheCount = ec2ClientManager.getCacheCount();
// Requesting another AWS client with the same set of credentials will not
// create a new entry in the cache.
AmazonEC2AsyncClient oldClient = this.client;
TestContext nextContext = new TestContext(1, Duration.ofSeconds(30L));
ec2ClientManager.getOrCreateEC2ClientAsync(this.creds, TestAWSSetupUtils.regionId, this.instanceService).exceptionally(t -> {
nextContext.fail(t);
throw new CompletionException(t);
}).thenAccept(ec2Client -> {
this.client = ec2Client;
nextContext.complete();
});
nextContext.await();
Assert.assertNotNull(this.client);
Assert.assertEquals(oldClient, this.client);
assertEquals(this.clientCacheCount, ec2ClientManager.getCacheCount());
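// The same client instance and an unchanged cache count confirm the second request was served from the ARN-based
// client cache rather than creating a new client.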
}