Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class: TableServiceTests, method: testEndToEnd.
/**
 * Tests an End-to-End scenario for a {@link TableStore} implementation using a real implementation of {@link StreamSegmentStore}
 * (without any mocks, manual event triggering or other test aids). Features tested:
 * - Table Segment creation and deletion.
 * - Conditional and unconditional updates.
 * - Conditional and unconditional removals.
 * - Recovery of Table Segments after failover.
 *
 * This tests both Hash Table Segments and Fixed-Key-Length Table Segments.
 */
@Test
public void testEndToEnd() throws Exception {
    val rnd = new Random(0);
    val segmentTypes = new SegmentType[] { SegmentType.builder().tableSegment().build(), SegmentType.builder().fixedKeyLengthTableSegment().build() };
    ArrayList<String> segmentNames;
    HashMap<BufferView, EntryData> keyInfo;

    // Phase 1: Create some segments and update some data (unconditionally).
    log.info("Starting Phase 1");
    try (val builder = createBuilder()) {
        val tableStore = builder.createTableStoreService();

        // Create the Table Segments.
        segmentNames = createSegments(tableStore, segmentTypes);
        log.info("Created Segments: {}.", String.join(", ", segmentNames));

        // Generate the keys and map them to segments.
        keyInfo = generateKeysForSegments(segmentNames, rnd);

        // Unconditional updates.
        val updates = generateUpdates(keyInfo, false, rnd);
        val updateVersions = executeUpdates(updates, tableStore);
        acceptUpdates(updates, updateVersions, keyInfo);
        log.info("Finished unconditional updates.");

        // Check.
        check(keyInfo, tableStore);
        log.info("Finished Phase 1");
    }

    // Phase 2: Force a recovery and remove all data (unconditionally).
    log.info("Starting Phase 2");
    try (val builder = createBuilder()) {
        val tableStore = builder.createTableStoreService();

        // Check (after recovery).
        check(keyInfo, tableStore);

        // Unconditional removals.
        val removals = generateRemovals(keyInfo, false);
        executeRemovals(removals, tableStore);
        acceptRemovals(removals, keyInfo);

        // Check.
        check(keyInfo, tableStore);
        log.info("Finished Phase 2");
    }

    // Phase 3: Force a recovery and conditionally update and remove data.
    log.info("Starting Phase 3");
    try (val builder = createBuilder()) {
        val tableStore = builder.createTableStoreService();

        // Check (after recovery).
        check(keyInfo, tableStore);

        // Conditional updates.
        val updates = generateUpdates(keyInfo, true, rnd);
        val updateVersions = executeUpdates(updates, tableStore);
        acceptUpdates(updates, updateVersions, keyInfo);
        val offsetConditionedUpdates = generateUpdates(keyInfo, true, rnd);
        val offsetUpdateVersions = executeOffsetConditionalUpdates(offsetConditionedUpdates, -1L, tableStore);
        acceptUpdates(offsetConditionedUpdates, offsetUpdateVersions, keyInfo);
        log.info("Finished conditional updates.");

        // Check.
        check(keyInfo, tableStore);

        // Conditional removals.
        val removals = generateRemovals(keyInfo, true);
        executeRemovals(removals, tableStore);
        acceptRemovals(removals, keyInfo);
        val offsetConditionedRemovals = generateRemovals(keyInfo, true);
        executeOffsetConditionalRemovals(offsetConditionedRemovals, -1L, tableStore);
        acceptRemovals(offsetConditionedRemovals, keyInfo);
        log.info("Finished conditional removes.");

        // Check.
        check(keyInfo, tableStore);
        log.info("Finished Phase 3");
    }

    // Phase 4: Force a recovery, conditionally update the data, then delete all Segments.
    log.info("Starting Phase 4");
    try (val builder = createBuilder()) {
        val tableStore = builder.createTableStoreService();

        // Check (after recovery).
        check(keyInfo, tableStore);

        // Conditional update again.
        val updates = generateUpdates(keyInfo, true, rnd);
        val updateVersions = executeUpdates(updates, tableStore);
        acceptUpdates(updates, updateVersions, keyInfo);
        log.info("Finished conditional updates.");

        // Check.
        check(keyInfo, tableStore);

        // Delete all Segments.
        val deletions = segmentNames.stream().map(s -> tableStore.deleteSegment(s, false, TIMEOUT)).collect(Collectors.toList());
        Futures.allOf(deletions).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        log.info("Finished Phase 4");
    }
}
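
The helpers above (createBuilder, generateUpdates, executeUpdates, check, and friends) are private to TableServiceTests and not shown in this excerpt. For orientation, here is a minimal sketch of the unconditional-update pattern these phases exercise, written directly against the TableStore contract; the segment name, payload, timeout, and the putUnconditionally helper itself are invented for illustration, and signatures may differ across Pravega versions:

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;
import io.pravega.segmentstore.contracts.tables.TableEntry;
import io.pravega.segmentstore.contracts.tables.TableStore;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

// Sketch only: performs one unconditional update and returns the assigned version.
private static long putUnconditionally(TableStore tableStore, String segmentName) throws Exception {
    final Duration timeout = Duration.ofSeconds(30);
    BufferView key = new ByteArraySegment("key-1".getBytes(StandardCharsets.UTF_8));
    BufferView value = new ByteArraySegment("value-1".getBytes(StandardCharsets.UTF_8));
    // An unversioned TableEntry requests an unconditional update (no compare-and-set on the version).
    List<Long> versions = tableStore
            .put(segmentName, Collections.singletonList(TableEntry.unversioned(key, value)), timeout)
            .get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    // The returned version can seed a later conditional update via TableEntry.versioned(...).
    return versions.get(0);
}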
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class: TableServiceTests, method: processDeltaIteratorItems.
private List<TableEntry> processDeltaIteratorItems(List<IteratorItem<TableEntry>> entries) {
    Map<BufferView, TableEntry> result = new HashMap<>();
    for (val item : entries) {
        TableEntry entry = item.getEntries().iterator().next();
        DeltaIteratorState state = DeltaIteratorState.deserialize(item.getState());
        if (state.isDeletionRecord() && result.containsKey(entry.getKey().getKey())) {
            // A deletion record supersedes any previously seen update for this key.
            result.remove(entry.getKey().getKey());
        } else {
            // Keep only the highest-versioned entry for each key.
            result.compute(entry.getKey().getKey(), (key, value) -> {
                if (value == null) {
                    return entry;
                } else {
                    return value.getKey().getVersion() < entry.getKey().getVersion() ? entry : value;
                }
            });
        }
    }
    return new ArrayList<>(result.values());
}
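
In other words, processDeltaIteratorItems collapses a raw delta-iteration stream to its net effect: per key, keep only the highest-versioned entry, and drop a key once a deletion record arrives for it. A self-contained sketch of the same reduction using plain types (the Item record and DeltaCollapseSketch class are invented for illustration; record syntax needs Java 16+):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DeltaCollapseSketch {
    // Invented stand-in for one delta-iterator item: a key, a version, and a deletion flag.
    record Item(String key, long version, boolean deletion) { }

    static List<Item> collapse(List<Item> stream) {
        Map<String, Item> result = new HashMap<>();
        for (Item item : stream) {
            if (item.deletion() && result.containsKey(item.key())) {
                // A deletion record cancels the update accumulated for this key.
                result.remove(item.key());
            } else {
                // Otherwise keep whichever entry carries the higher version.
                result.merge(item.key(), item, (old, cur) -> old.version() < cur.version() ? cur : old);
            }
        }
        return new ArrayList<>(result.values());
    }
}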
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class: WriterTableProcessorTests, method: generateAndPopulateEntries.
private ArrayList<TestBatchData> generateAndPopulateEntries(TestContext context) {
    val result = new ArrayList<TestBatchData>();
    int count = 0;
    while (count < UPDATE_COUNT) {
        // Cap the last batch so the total number of updates equals UPDATE_COUNT exactly.
        int batchSize = Math.min(UPDATE_BATCH_SIZE, UPDATE_COUNT - count);
        // Each batch's expected state is seeded from the previous batch's expected entries.
        Map<BufferView, TableEntry> prevState = result.isEmpty() ? Collections.emptyMap() : result.get(result.size() - 1).expectedEntries;
        result.add(generateAndPopulateEntriesBatch(batchSize, prevState, context));
        count += batchSize;
    }
    return result;
}
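
Both this helper and testEndToEnd key their maps by BufferView. This works because Pravega's BufferView implementations (via AbstractBufferView) define content-based equals() and hashCode(), so two distinct buffer instances with the same bytes hash to the same bucket. A quick illustration, assuming those semantics (the demo class is invented):

import io.pravega.common.util.BufferView;
import io.pravega.common.util.ByteArraySegment;
import java.util.HashMap;
import java.util.Map;

public class BufferViewKeyDemo {
    public static void main(String[] args) {
        Map<BufferView, String> map = new HashMap<>();
        map.put(new ByteArraySegment(new byte[] { 1, 2, 3 }), "first");
        // A distinct buffer instance with identical contents addresses the same mapping.
        String hit = map.get(new ByteArraySegment(new byte[] { 1, 2, 3 }));
        System.out.println("first".equals(hit)); // Expected: true, given content-based equality.
    }
}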
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class: DeltaIteratorStateTests, method: testSerialization.
@Test
public void testSerialization() {
    DeltaIteratorState one = new DeltaIteratorState(1, true, false, true);
    DeltaIteratorState two = new DeltaIteratorState(1, true, false, true);
    BufferView bufferOne = one.serialize();
    BufferView bufferTwo = two.serialize();
    // Make sure that two state objects with identical arguments get serialized to the same byte array.
    Assert.assertArrayEquals(bufferOne.getCopy(), bufferTwo.getCopy());
    // Check that each buffer deserializes back to its original form.
    DeltaIteratorState stateOne = DeltaIteratorState.deserialize(bufferOne);
    DeltaIteratorState stateTwo = DeltaIteratorState.deserialize(bufferTwo);
    Assert.assertTrue(testEquals(stateOne, one));
    Assert.assertTrue(testEquals(stateTwo, two));
    // Also check that the two deserialized objects are equal to each other.
    Assert.assertTrue(testEquals(stateOne, stateTwo));
}
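
Given the content-based equality noted earlier, the byte-array comparison above could arguably be written without materializing copies; a hedged alternative, assuming AbstractBufferView's equals() semantics:

// Equivalent content check without getCopy() (assumes content-based equals() on BufferView).
Assert.assertEquals(bufferOne, bufferTwo);
Assert.assertEquals(bufferOne.getLength(), bufferTwo.getLength());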
Use of io.pravega.common.util.BufferView in project pravega by pravega.
Class: IndexReaderWriterTests, method: testUpdateAndRemove.
private void testUpdateAndRemove(KeyHasher hasher) {
    final int batchSizeBase = 200;

    // This should not exceed batchSizeBase (otherwise batchSizeBase / iterationCount rounds down to 0 below).
    final int iterationCount = 200;
    val rnd = new Random(0);
    val w = newWriter(hasher);
    val segment = newMock();

    // Generate update and removal batches, applying each batch to the index as we go.
    long offset = 0;
    val existingKeys = new HashMap<Long, BufferView>();
    val allKeys = new HashSet<BufferView>();
    int maxUpdateBatchSize = batchSizeBase + 1;
    int maxRemoveBatchSize = 1;
    for (int i = 0; i < iterationCount; i++) {
        // Insert/Update a set of keys. With every iteration, we update fewer and fewer.
        int updateBatchSize = rnd.nextInt(maxUpdateBatchSize) + 1;
        val updateBatch = generateUpdateBatch(updateBatchSize, offset, rnd);
        offset = updateKeys(updateBatch, w, existingKeys, segment);
        allKeys.addAll(updateBatch.keySet());

        // Remove a set of keys. With every iteration, we remove more and more.
        // Pick existing keys at random, and delete them.
        int removeBatchSize = rnd.nextInt(maxRemoveBatchSize) + 1;
        val removeBatch = new HashMap<BufferView, Long>();
        val remainingKeys = new ArrayList<>(existingKeys.values());
        int batchOffset = 0;
        while (removeBatch.size() < removeBatchSize && removeBatch.size() < remainingKeys.size()) {
            BufferView key;
            do {
                key = remainingKeys.get(rnd.nextInt(remainingKeys.size()));
            } while (removeBatch.containsKey(key));
            removeBatch.put(key, encodeOffset(offset + batchOffset, true));
            batchOffset++; // Advance so each removal in the batch gets a distinct offset.
        }

        // Pick a non-existing key, and add it too.
        BufferView nonExistingKey;
        do {
            byte[] b = new byte[rnd.nextInt(MAX_KEY_LENGTH) + 1];
            rnd.nextBytes(b);
            nonExistingKey = new ByteArraySegment(b);
        } while (allKeys.contains(nonExistingKey));
        removeBatch.put(nonExistingKey, encodeOffset(offset + batchOffset, true));

        // Apply the removals.
        offset = updateKeys(removeBatch, w, existingKeys, segment);
        maxUpdateBatchSize -= batchSizeBase / iterationCount;
        maxRemoveBatchSize += batchSizeBase / iterationCount;
    }

    // Verify the index.
    checkIndex(allKeys, existingKeys, w, hasher, segment);
}
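
encodeOffset(...) is another test-local helper not shown in this excerpt; from its call sites it evidently packs an offset and a removal flag into a single Long. A purely illustrative sketch of one way to do such packing (this is not Pravega's actual encoding):

// Illustrative only: use the sign bit as the removal flag, the low 63 bits as the offset.
private static final long REMOVAL_FLAG = Long.MIN_VALUE; // 0x8000_0000_0000_0000

static long encodeOffset(long offset, boolean isRemoval) {
    assert offset >= 0 : "offset must be non-negative";
    return isRemoval ? (offset | REMOVAL_FLAG) : offset;
}

static long decodeOffset(long encoded) {
    return encoded & ~REMOVAL_FLAG;
}

static boolean isRemoval(long encoded) {
    return (encoded & REMOVAL_FLAG) != 0;
}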