use of io.pravega.segmentstore.contracts.AttributeId in project pravega by pravega.
the class SegmentAttributeBTreeIndex method get.
@Override
public CompletableFuture<Map<AttributeId, Long>> get(@NonNull Collection<AttributeId> keys, @NonNull Duration timeout) {
    ensureInitialized();
    if (keys.isEmpty()) {
        // Nothing to do.
        return CompletableFuture.completedFuture(Collections.emptyMap());
    }

    // Keep two lists, one of keys (in some order) and one of serialized keys (in the same order).
    val keyList = new ArrayList<AttributeId>(keys.size());
    val serializedKeys = new ArrayList<ByteArraySegment>(keys.size());
    for (AttributeId key : keys) {
        keyList.add(key);
        serializedKeys.add(this.keySerializer.serialize(key));
    }
    // Fetch the raw values from the index; retry the read if it lands on a truncated portion of the attribute
    // segment file (see READ_RETRY Javadoc).
    return READ_RETRY.runAsync(() -> this.index.get(serializedKeys, timeout), this.executor).thenApply(entries -> {
        assert entries.size() == keys.size() : "Unexpected number of entries returned by the index search.";

        // The index search result is a list of values in the same order as the keys we passed in, so we need
        // to use the list index to match them.
        Map<AttributeId, Long> result = new HashMap<>();
        for (int i = 0; i < keyList.size(); i++) {
            ByteArraySegment v = entries.get(i);
            if (v != null) {
                // BTreeIndex will return null if a key is not present; however we exclude that from our result.
                result.put(keyList.get(i), deserializeValue(v));
            }
        }
        return result;
    }).exceptionally(this::handleIndexOperationException);
}
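The method above relies on two helpers not shown in this snippet: this.keySerializer.serialize(key) and deserializeValue(v). Attribute values are plain 64-bit longs, so a fixed-width encoding is sufficient. The following is a minimal, hypothetical stand-in (using java.nio.ByteBuffer, not Pravega's actual serializer) that illustrates the round trip deserializeValue is assumed to perform:

import java.nio.ByteBuffer;

// Hypothetical fixed-width codec for 64-bit attribute values (illustration only; not Pravega's serializer).
final class LongValueCodec {

    static byte[] serialize(long value) {
        // Big-endian, 8 bytes; byte-wise lexicographic order matches numeric order for non-negative values.
        return ByteBuffer.allocate(Long.BYTES).putLong(value).array();
    }

    static long deserialize(byte[] data) {
        assert data.length == Long.BYTES : "Unexpected serialized value length.";
        return ByteBuffer.wrap(data).getLong();
    }

    public static void main(String[] args) {
        byte[] encoded = serialize(42L);
        System.out.println(deserialize(encoded)); // prints 42
    }
}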
use of io.pravega.segmentstore.contracts.AttributeId in project pravega by pravega.
the class StreamSegmentContainerTests method testBasicConditionalMergeScenarios.
/**
* Test in detail the basic situations that a conditional segment merge can face.
*/
@Test
public void testBasicConditionalMergeScenarios() throws Exception {
    @Cleanup TestContext context = createContext();
    context.container.startAsync().awaitRunning();
    final String parentSegment = "parentSegment";

    // This will be the attribute update to execute against the parent segment.
    Function<String, AttributeUpdateCollection> attributeUpdateForTxn = txnName -> AttributeUpdateCollection.from(
            new AttributeUpdate(AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes())),
                    AttributeUpdateType.ReplaceIfEquals, txnName.hashCode() + 1, txnName.hashCode()));

    Function<String, Long> getAttributeValue = txnName -> {
        AttributeId attributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
        return context.container.getAttributes(parentSegment, Collections.singletonList(attributeId), true, TIMEOUT)
                .join().get(attributeId);
    };

    // Create a parent Segment.
    context.container.createStreamSegment(parentSegment, getSegmentType(parentSegment), null, TIMEOUT)
            .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    SegmentType segmentType = getSegmentType(parentSegment);
    // Case 1: Create an empty transaction that fails to merge conditionally due to bad attributes.
    String txnName = NameUtils.getTransactionNameFromId(parentSegment, UUID.randomUUID());
    AttributeId txnAttributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
    context.container.createStreamSegment(txnName, segmentType, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    AttributeUpdateCollection attributeUpdates = attributeUpdateForTxn.apply(txnName);
    AssertExtensions.assertFutureThrows("Transaction was expected to fail on attribute update",
            context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT),
            ex -> ex instanceof BadAttributeUpdateException);
    Assert.assertEquals(Attributes.NULL_ATTRIBUTE_VALUE, (long) getAttributeValue.apply(txnName));

    // Case 2: Now, we prepare the attributes in the parent segment so the merge of the empty transaction succeeds.
    context.container.updateAttributes(parentSegment,
            AttributeUpdateCollection.from(new AttributeUpdate(txnAttributeId, AttributeUpdateType.Replace, txnName.hashCode())),
            TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // As the source segment is empty, the amount of merged data should be 0.
    Assert.assertEquals(0L, context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT)
            .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).getMergedDataLength());
    // But the attribute related to that transaction merge on the parent segment should have been updated.
    Assert.assertEquals(txnName.hashCode() + 1L, (long) context.container
            .getAttributes(parentSegment, Collections.singletonList(txnAttributeId), true, TIMEOUT)
            .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(txnAttributeId));

    // Case 3: Create a non-empty transaction that should fail due to a conditional attribute update failure.
    txnName = NameUtils.getTransactionNameFromId(parentSegment, UUID.randomUUID());
    txnAttributeId = AttributeId.fromUUID(UUID.nameUUIDFromBytes(txnName.getBytes()));
    attributeUpdates = attributeUpdateForTxn.apply(txnName);
    context.container.createStreamSegment(txnName, segmentType, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // Add some appends to the transaction.
    RefCountByteArraySegment appendData = getAppendData(txnName, 1);
    context.container.append(txnName, appendData, null, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // Attempt the conditional merge.
    AssertExtensions.assertFutureThrows("Transaction was expected to fail on attribute update",
            context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT),
            ex -> ex instanceof BadAttributeUpdateException);
    Assert.assertEquals(Attributes.NULL_ATTRIBUTE_VALUE, (long) getAttributeValue.apply(txnName));

    // Case 4: Now, we prepare the attributes in the parent segment so the merge of the non-empty transaction succeeds.
    context.container.updateAttributes(parentSegment,
            AttributeUpdateCollection.from(new AttributeUpdate(txnAttributeId, AttributeUpdateType.Replace, txnName.hashCode())),
            TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    // As the source segment is non-empty, the amount of merged data should be greater than 0.
    Assert.assertTrue(context.container.mergeStreamSegment(parentSegment, txnName, attributeUpdates, TIMEOUT)
            .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).getMergedDataLength() > 0);
    // The attribute related to that transaction merge on the parent segment should have been updated as well.
    Assert.assertEquals(txnName.hashCode() + 1L, (long) context.container
            .getAttributes(parentSegment, Collections.singletonList(txnAttributeId), true, TIMEOUT)
            .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(txnAttributeId));

    context.container.stopAsync().awaitTerminated();
}
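The conditional merges in this test hinge on AttributeUpdateType.ReplaceIfEquals: the attribute update (and with it the whole merge) succeeds only if the attribute currently holds the supplied comparison value. The toy model below mimics that compare-and-set semantics with a plain map; names such as replaceIfEquals are invented for illustration, and this is not the container's implementation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Toy compare-and-set over a map, mimicking ReplaceIfEquals semantics (illustrative only).
final class ReplaceIfEqualsDemo {

    // Stand-in for Attributes.NULL_ATTRIBUTE_VALUE (the "not set" marker).
    static final long NULL_ATTRIBUTE_VALUE = Long.MIN_VALUE;

    static boolean replaceIfEquals(Map<String, Long> attributes, String key, long newValue, long expected) {
        long current = attributes.getOrDefault(key, NULL_ATTRIBUTE_VALUE);
        if (current != expected) {
            // The real container surfaces this as a BadAttributeUpdateException.
            return false;
        }
        attributes.put(key, newValue);
        return true;
    }

    public static void main(String[] args) {
        Map<String, Long> attrs = new ConcurrentHashMap<>();
        String txn = "txn1";
        // Case 1: the attribute is not set yet, so the conditional update fails.
        System.out.println(replaceIfEquals(attrs, txn, txn.hashCode() + 1, txn.hashCode())); // false
        // Case 2: seed the expected value first (like the updateAttributes call in the test); now it succeeds.
        attrs.put(txn, (long) txn.hashCode());
        System.out.println(replaceIfEquals(attrs, txn, txn.hashCode() + 1, txn.hashCode())); // true
    }
}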
use of io.pravega.segmentstore.contracts.AttributeId in project pravega by pravega.
the class StreamSegmentContainerTests method testForSegment.
/**
* Tests the forSegment() method. We test this here vs in StreamSegmentContainerTests because we want to exercise
* additional code in StreamSegmentService. This will invoke the StreamSegmentContainer code as well.
*/
@Test
public void testForSegment() {
    AttributeId attributeId1 = AttributeId.randomUUID();
    AttributeId attributeId2 = AttributeId.randomUUID();
    AttributeId attributeId3 = AttributeId.randomUUID();
    @Cleanup val context = createContext();
    context.container.startAsync().awaitRunning();

    // Create the StreamSegments.
    val segmentNames = createSegments(context);

    // Add some appends.
    for (String segmentName : segmentNames) {
        byte[] appendData = ("Append_" + segmentName).getBytes();
        val dsa = context.container.forSegment(segmentName, TIMEOUT).join();
        dsa.append(new ByteArraySegment(appendData),
                AttributeUpdateCollection.from(new AttributeUpdate(attributeId1, AttributeUpdateType.None, 1L)), TIMEOUT).join();
        dsa.updateAttributes(AttributeUpdateCollection.from(new AttributeUpdate(attributeId2, AttributeUpdateType.None, 2L)), TIMEOUT).join();
        dsa.append(new ByteArraySegment(appendData),
                AttributeUpdateCollection.from(new AttributeUpdate(attributeId3, AttributeUpdateType.None, 3L)),
                dsa.getInfo().getLength(), TIMEOUT).join();
        dsa.seal(TIMEOUT).join();
        dsa.truncate(1, TIMEOUT).join();

        // Check metadata.
        val info = dsa.getInfo();
        Assert.assertEquals("Unexpected name.", segmentName, info.getName());
        Assert.assertEquals("Unexpected length.", 2 * appendData.length, info.getLength());
        Assert.assertEquals("Unexpected startOffset.", 1, info.getStartOffset());
        Assert.assertEquals("Unexpected attribute count.", 3,
                info.getAttributes().keySet().stream().filter(id -> !AUTO_ATTRIBUTES.contains(id)).count());
        Assert.assertEquals("Unexpected attribute 1.", 1L, (long) info.getAttributes().get(attributeId1));
        Assert.assertEquals("Unexpected attribute 2.", 2L, (long) info.getAttributes().get(attributeId2));
Assert.assertEquals("Unexpected attribute 2.", 3L, (long) info.getAttributes().get(attributeId3));
Assert.assertTrue("Unexpected isSealed.", info.isSealed());
// Not expecting any in this case as they are disabled for this segment.
Assert.assertEquals(-1L, (long) dsa.getExtendedAttributeCount(TIMEOUT).join());
// Check written data.
byte[] readBuffer = new byte[appendData.length - 1];
@Cleanup val readResult = dsa.read(1, readBuffer.length, TIMEOUT);
val firstEntry = readResult.next();
firstEntry.requestContent(TIMEOUT);
val entryContents = firstEntry.getContent().join();
Assert.assertEquals("Unexpected number of bytes read.", readBuffer.length, entryContents.getLength());
entryContents.copyTo(ByteBuffer.wrap(readBuffer));
AssertExtensions.assertArrayEquals("Unexpected data read back.", appendData, 1, readBuffer, 0, readBuffer.length);
}
}
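The second append in the loop above is offset-conditional: it passes dsa.getInfo().getLength() as the expected offset, so it only succeeds if the segment has not grown in the meantime. The toy in-memory segment below sketches that behavior together with seal and truncate; it is a simplified stand-in for illustration, not Pravega's segment implementation:

import java.io.ByteArrayOutputStream;

// Toy in-memory segment illustrating offset-conditional appends, seal and truncate (illustrative only).
final class MiniSegment {
    private final ByteArrayOutputStream data = new ByteArrayOutputStream();
    private long startOffset = 0;
    private boolean sealed = false;

    long append(byte[] bytes, long expectedOffset) {
        if (sealed) {
            throw new IllegalStateException("Segment is sealed.");
        }
        if (expectedOffset >= 0 && expectedOffset != data.size()) {
            // The real container fails such appends with a BadOffsetException.
            throw new IllegalStateException("Conditional append at wrong offset: " + expectedOffset);
        }
        data.write(bytes, 0, bytes.length);
        return data.size();
    }

    void seal() { this.sealed = true; }

    // Logical truncation: data before startOffset is no longer readable, but the length is unchanged.
    void truncate(long offset) { this.startOffset = offset; }

    long getLength() { return data.size(); }

    long getStartOffset() { return startOffset; }

    public static void main(String[] args) {
        MiniSegment s = new MiniSegment();
        byte[] chunk = "Append_segment".getBytes();
        s.append(chunk, -1);           // unconditional append
        s.append(chunk, chunk.length); // conditional append at the current length succeeds
        s.seal();
        s.truncate(1);
        System.out.println(s.getLength());      // 2 * chunk.length
        System.out.println(s.getStartOffset()); // 1
    }
}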
use of io.pravega.segmentstore.contracts.AttributeId in project pravega by pravega.
the class StreamSegmentContainerTests method testForceFlush.
/**
* Tests the {@link SegmentContainer#flushToStorage} method.
*/
@Test
public void testForceFlush() throws Exception {
    final AttributeId attributeReplace = AttributeId.randomUUID();
    final long expectedAttributeValue = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    final int entriesPerSegment = 10;
    @Cleanup TestContext context = new TestContext(DEFAULT_CONFIG, NO_TRUNCATIONS_DURABLE_LOG_CONFIG, INFREQUENT_FLUSH_WRITER_CONFIG, null);
    val durableLog = new AtomicReference<OperationLog>();
    val durableLogFactory = new WatchableOperationLogFactory(context.operationLogFactory, durableLog::set);
    @Cleanup val container = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory,
            context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory,
            context.getDefaultExtensions(), executorService());
    container.startAsync().awaitRunning();
    Assert.assertNotNull(durableLog.get());
    val tableStore = container.getExtension(ContainerTableExtension.class);

    // 1. Create the StreamSegments and Table Segments.
    ArrayList<String> segmentNames = new ArrayList<>();
    ArrayList<String> tableSegmentNames = new ArrayList<>();
    ArrayList<CompletableFuture<Void>> opFutures = new ArrayList<>();
    for (int i = 0; i < SEGMENT_COUNT; i++) {
        String segmentName = getSegmentName(i);
        segmentNames.add(segmentName);
        opFutures.add(container.createStreamSegment(segmentName, getSegmentType(segmentName), null, TIMEOUT));
    }
    for (int i = 0; i < SEGMENT_COUNT; i++) {
        String segmentName = getSegmentName(i) + "_Table";
        tableSegmentNames.add(segmentName);
        val type = SegmentType.builder(getSegmentType(segmentName)).tableSegment().build();
        opFutures.add(tableStore.createSegment(segmentName, type, TIMEOUT));
    }

    // 1.1 Wait for all segments to be created prior to using them.
    Futures.allOf(opFutures).join();
    opFutures.clear();

    // 2. Add some appends and update some of the attributes.
    HashMap<String, Long> lengths = new HashMap<>();
    HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
    for (String segmentName : segmentNames) {
        for (int i = 0; i < APPENDS_PER_SEGMENT; i++) {
            val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, i + 1));
            val appendData = getAppendData(segmentName, i);
            long expectedLength = lengths.getOrDefault(segmentName, 0L) + appendData.getLength();
            val append = (i % 2 == 0)
                    ? container.append(segmentName, appendData, attributeUpdates, TIMEOUT)
                    : container.append(segmentName, lengths.get(segmentName), appendData, attributeUpdates, TIMEOUT);
            opFutures.add(Futures.toVoid(append));
            lengths.put(segmentName, expectedLength);
            recordAppend(segmentName, appendData, segmentContents, null);
        }
        for (int i = 0; i < ATTRIBUTE_UPDATES_PER_SEGMENT; i++) {
            val attributeUpdates = AttributeUpdateCollection.from(new AttributeUpdate(attributeReplace, AttributeUpdateType.Replace, APPENDS_PER_SEGMENT + i + 1));
            opFutures.add(container.updateAttributes(segmentName, attributeUpdates, TIMEOUT));
        }
    }

    // 2.2 Add some entries to the table segments.
    final BiFunction<String, Integer, TableEntry> createTableEntry = (segmentName, entryId) -> TableEntry.unversioned(
            new ByteArraySegment(String.format("Key_%s_%s", segmentName, entryId).getBytes()),
            new ByteArraySegment(String.format("Value_%s_%s", segmentName, entryId).getBytes()));
    for (String segmentName : tableSegmentNames) {
        for (int i = 0; i < entriesPerSegment; i++) {
            opFutures.add(Futures.toVoid(tableStore.put(segmentName, Collections.singletonList(createTableEntry.apply(segmentName, i)), TIMEOUT)));
        }
    }
    Futures.allOf(opFutures).join();

    // 3. Instead of waiting for the Writer to move data to Storage, we invoke flushToStorage to verify that all
    // operations have been applied to Storage.
    val forceFlush = container.flushToStorage(TIMEOUT);
    forceFlush.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    checkStorage(segmentContents, lengths, container, context.storage);

    // 4. Truncate all the data in the DurableLog and immediately shut down the container.
    val truncateSeqNo = container.metadata.getClosestValidTruncationPoint(container.metadata.getOperationSequenceNumber());
    durableLog.get().truncate(truncateSeqNo, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
    container.close();

    // 5. Create a new container instance (from the nearly empty DurableLog) and with an empty cache.
    @Cleanup val container2 = new StreamSegmentContainer(CONTAINER_ID, DEFAULT_CONFIG, durableLogFactory,
            context.readIndexFactory, context.attributeIndexFactory, context.writerFactory, context.storageFactory,
            context.getDefaultExtensions(), executorService());
    container2.startAsync().awaitRunning();

    // 5.1 Verify Segment Data.
    for (val sc : segmentContents.entrySet()) {
        // Contents.
        byte[] expectedData = sc.getValue().toByteArray();
        byte[] actualData = new byte[expectedData.length];
        container2.read(sc.getKey(), 0, actualData.length, TIMEOUT).join().readRemaining(actualData, TIMEOUT);
        Assert.assertArrayEquals("Unexpected contents for " + sc.getKey(), expectedData, actualData);

        // Length.
        val si = container2.getStreamSegmentInfo(sc.getKey(), TIMEOUT).join();
        Assert.assertEquals("Unexpected length for " + sc.getKey(), expectedData.length, si.getLength());

        // Attributes.
        val attributes = container2.getAttributes(sc.getKey(), Collections.singleton(attributeReplace), false, TIMEOUT).join();
        Assert.assertEquals("Unexpected attribute for " + sc.getKey(), expectedAttributeValue, (long) attributes.get(attributeReplace));
    }

    // 5.2 Verify table segment data.
    val tableStore2 = container2.getExtension(ContainerTableExtension.class);
    for (String segmentName : tableSegmentNames) {
        for (int i = 0; i < entriesPerSegment; i++) {
            val expected = createTableEntry.apply(segmentName, i);
            val actual = tableStore2.get(segmentName, Collections.singletonList(expected.getKey().getKey()), TIMEOUT)
                    .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS).get(0);
            Assert.assertTrue("Unexpected Table Entry for " + segmentName + " at position " + i,
                    expected.getKey().getKey().equals(actual.getKey().getKey()) && expected.getValue().equals(actual.getValue()));
        }
    }

    // Ending Note: if all the above tests passed, we have implicitly validated that the Container Metadata Segment has
    // also been properly flushed.
}
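One pattern worth noting above: every asynchronous operation is added to opFutures and awaited in bulk via Futures.allOf, Pravega's helper over the JDK's CompletableFuture.allOf. A JDK-only sketch of the same collect-then-join pattern:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

// JDK-only sketch of the "collect futures, then join them all" pattern used in testForceFlush.
final class BatchJoinDemo {
    public static void main(String[] args) {
        List<CompletableFuture<Void>> opFutures = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            final int id = i;
            opFutures.add(CompletableFuture.runAsync(() -> System.out.println("op " + id + " done")));
        }
        // Completes when all operations complete; completes exceptionally if any of them failed.
        CompletableFuture.allOf(opFutures.toArray(new CompletableFuture[0])).join();
        System.out.println("all operations applied");
    }
}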
use of io.pravega.segmentstore.contracts.AttributeId in project pravega by pravega.
the class StreamSegmentContainerTests method checkAttributeIterators.
private void checkAttributeIterators(DirectSegmentAccess segment, List<AttributeId> sortedAttributes, Map<AttributeId, Long> allExpectedValues) throws Exception {
    int skip = sortedAttributes.size() / 10;
    for (int i = 0; i < sortedAttributes.size() / 2; i += skip) {
        AttributeId fromId = sortedAttributes.get(i);
        AttributeId toId = sortedAttributes.get(sortedAttributes.size() - i - 1);
        val expectedValues = allExpectedValues.entrySet().stream()
                .filter(e -> fromId.compareTo(e.getKey()) <= 0 && toId.compareTo(e.getKey()) >= 0)
                .sorted(Comparator.comparing(Map.Entry::getKey))
                .collect(Collectors.toList());
        val actualValues = new ArrayList<Map.Entry<AttributeId, Long>>();
        val ids = new HashSet<AttributeId>();
        val iterator = segment.attributeIterator(fromId, toId, TIMEOUT).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        iterator.forEachRemaining(batch -> batch.forEach(attribute -> {
            Assert.assertTrue("Duplicate key found.", ids.add(attribute.getKey()));
            actualValues.add(attribute);
        }), executorService()).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        AssertExtensions.assertListEquals("Unexpected iterator result.", expectedValues, actualValues,
                (e1, e2) -> e1.getKey().equals(e2.getKey()) && e1.getValue().equals(e2.getValue()));
    }
}
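The iterator returned by segment.attributeIterator() yields the attributes in sorted batches, and forEachRemaining drains it asynchronously, one batch at a time, on the supplied executor. Below is a minimal async batch iterator with the same shape, written against the JDK only; Pravega's actual AsyncIterator interface differs in its details:

import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.function.Consumer;

// Minimal async batch iterator mirroring the forEachRemaining usage above (illustrative only).
final class BatchAsyncIterator<T> {
    private final Iterator<List<T>> batches;

    BatchAsyncIterator(Iterator<List<T>> batches) {
        this.batches = batches;
    }

    // Returns the next batch, or null once the iterator is exhausted.
    CompletableFuture<List<T>> getNext() {
        return CompletableFuture.completedFuture(batches.hasNext() ? batches.next() : null);
    }

    // Drains the iterator, invoking the consumer once per batch on the given executor.
    CompletableFuture<Void> forEachRemaining(Consumer<List<T>> consumer, Executor executor) {
        return getNext().thenComposeAsync(batch -> {
            if (batch == null) {
                return CompletableFuture.<Void>completedFuture(null);
            }
            consumer.accept(batch);
            return forEachRemaining(consumer, executor);
        }, executor);
    }

    public static void main(String[] args) {
        BatchAsyncIterator<Integer> it = new BatchAsyncIterator<>(List.of(List.of(1, 2), List.of(3)).iterator());
        it.forEachRemaining(batch -> System.out.println("batch: " + batch), Runnable::run).join();
    }
}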