Use of io.pravega.common.util.ByteArraySegment in project pravega by pravega.
The class SegmentHelperMock, method getSegmentHelperMockForTables:
public static SegmentHelper getSegmentHelperMockForTables(ScheduledExecutorService executor) {
    SegmentHelper helper = getSegmentHelperMock();
    final Object lock = new Object();
    final Map<String, Map<ByteBuffer, TableSegmentEntry>> mapOfTables = new HashMap<>();
    final Map<String, Map<ByteBuffer, Long>> mapOfTablesPosition = new HashMap<>();
    // region create table
    doAnswer(x -> {
        String tableName = x.getArgument(0);
        return CompletableFuture.runAsync(() -> {
            synchronized (lock) {
                mapOfTables.putIfAbsent(tableName, new HashMap<>());
                mapOfTablesPosition.putIfAbsent(tableName, new HashMap<>());
            }
        }, executor);
    }).when(helper).createTableSegment(anyString(), anyString(), anyLong(), anyBoolean(), anyInt(), anyLong());
    // endregion
    // region delete table
    doAnswer(x -> {
        String tableName = x.getArgument(0);
        Boolean mustBeEmpty = x.getArgument(1);
        final WireCommandType type = WireCommandType.DELETE_TABLE_SEGMENT;
        return CompletableFuture.supplyAsync(() -> {
            synchronized (lock) {
                if (!mapOfTables.containsKey(tableName)) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                }
                boolean empty = Optional.ofNullable(mapOfTables.get(tableName)).orElse(Collections.emptyMap()).isEmpty();
                if (!mustBeEmpty || empty) {
                    mapOfTables.remove(tableName);
                    mapOfTablesPosition.remove(tableName);
                    return null;
                } else {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableSegmentNotEmpty);
                }
            }
        }, executor);
    }).when(helper).deleteTableSegment(anyString(), anyBoolean(), anyString(), anyLong());
    // endregion
    // region update keys
    doAnswer(x -> {
        final WireCommandType type = WireCommandType.UPDATE_TABLE_ENTRIES;
        String tableName = x.getArgument(0);
        List<TableSegmentEntry> entries = x.getArgument(1);
        return CompletableFuture.supplyAsync(() -> {
            synchronized (lock) {
                Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName);
                Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName);
                if (table == null) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                } else {
                    List<TableSegmentKeyVersion> resultList = new LinkedList<>();
                    entries.forEach(entry -> {
                        ByteBuffer key = entry.getKey().getKey().copy().nioBuffer();
                        byte[] value = entry.getValue().copy().array();
                        TableSegmentEntry existingEntry = table.get(key);
                        if (existingEntry == null) {
                            if (entry.getKey().getVersion().equals(TableSegmentKeyVersion.NOT_EXISTS)) {
                                TableSegmentEntry newEntry = TableSegmentEntry.versioned(key.array(), value, 0);
                                table.put(key, newEntry);
                                tablePos.put(key, System.nanoTime());
                                resultList.add(newEntry.getKey().getVersion());
                            } else {
                                throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyDoesNotExist);
                            }
                        } else if (existingEntry.getKey().getVersion().equals(entry.getKey().getVersion())) {
                            TableSegmentKeyVersion newVersion = TableSegmentKeyVersion.from(existingEntry.getKey().getVersion().getSegmentVersion() + 1);
                            TableSegmentEntry newEntry = TableSegmentEntry.versioned(key.array(), value, newVersion.getSegmentVersion());
                            table.put(key, newEntry);
                            tablePos.put(key, System.nanoTime());
                            resultList.add(newVersion);
                        } else {
                            throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion);
                        }
                    });
                    return resultList;
                }
            }
        }, executor);
    }).when(helper).updateTableEntries(anyString(), any(), anyString(), anyLong());
    // endregion
    // region remove keys
    doAnswer(x -> {
        final WireCommandType type = WireCommandType.REMOVE_TABLE_KEYS;
        String tableName = x.getArgument(0);
        List<TableSegmentKey> keys = x.getArgument(1);
        return CompletableFuture.runAsync(() -> {
            synchronized (lock) {
                Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName);
                Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName);
                if (table == null) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                } else {
                    keys.forEach(rawKey -> {
                        ByteBuffer key = rawKey.getKey().copy().nioBuffer();
                        TableSegmentEntry existingEntry = table.get(key);
                        if (existingEntry != null) {
                            if (existingEntry.getKey().getVersion().equals(rawKey.getVersion()) || rawKey.getVersion() == null || rawKey.getVersion().equals(TableSegmentKeyVersion.NO_VERSION)) {
                                table.remove(key);
                                tablePos.remove(key);
                            } else {
                                throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion);
                            }
                        }
                    });
                }
            }
        }, executor);
    }).when(helper).removeTableKeys(anyString(), any(), anyString(), anyLong());
    // endregion
    // region read keys
    doAnswer(x -> {
        final WireCommandType type = WireCommandType.READ_TABLE;
        String tableName = x.getArgument(0);
        List<TableSegmentKey> requestKeys = x.getArgument(1);
        return CompletableFuture.supplyAsync(() -> {
            synchronized (lock) {
                Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName);
                if (table == null) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                } else {
                    List<TableSegmentEntry> resultList = new LinkedList<>();
                    requestKeys.forEach(requestKey -> {
                        ByteBuffer key = requestKey.getKey().copy().nioBuffer();
                        TableSegmentEntry existingEntry = table.get(key);
                        if (existingEntry == null) {
                            resultList.add(TableSegmentEntry.notExists(key.array(), new byte[0]));
                        } else if (existingEntry.getKey().getVersion().equals(requestKey.getVersion()) || requestKey.getVersion() == null || requestKey.getVersion().equals(TableSegmentKeyVersion.NO_VERSION)) {
                            resultList.add(duplicate(existingEntry));
                        } else {
                            throw new WireCommandFailedException(type, WireCommandFailedException.Reason.TableKeyBadVersion);
                        }
                    });
                    return resultList;
                }
            }
        }, executor);
    }).when(helper).readTable(anyString(), any(), anyString(), anyLong());
    // endregion
    // region readTableKeys
    doAnswer(x -> {
        String tableName = x.getArgument(0);
        int limit = x.getArgument(1);
        HashTableIteratorItem.State state = x.getArgument(2);
        final WireCommandType type = WireCommandType.READ_TABLE;
        return CompletableFuture.supplyAsync(() -> {
            synchronized (lock) {
                Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName);
                Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName);
                if (table == null) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                } else {
                    long floor;
                    if (state.equals(HashTableIteratorItem.State.EMPTY)) {
                        floor = 0L;
                    } else {
                        floor = new ByteArraySegment(state.toBytes()).getLong(0);
                    }
                    AtomicLong token = new AtomicLong(floor);
                    List<TableSegmentKey> list = tablePos.entrySet().stream()
                            .sorted(Comparator.comparingLong(Map.Entry::getValue))
                            .filter(c -> c.getValue() > floor)
                            .map(r -> {
                                token.set(r.getValue());
                                return duplicate(table.get(r.getKey()).getKey());
                            })
                            .limit(limit)
                            .collect(Collectors.toList());
                    byte[] continuationToken = new byte[Long.BYTES];
                    BitConverter.writeLong(continuationToken, 0, token.get());
                    HashTableIteratorItem.State newState = HashTableIteratorItem.State.fromBytes(Unpooled.wrappedBuffer(continuationToken));
                    return new HashTableIteratorItem<>(newState, list);
                }
            }
        }, executor);
    }).when(helper).readTableKeys(anyString(), anyInt(), any(), anyString(), anyLong());
    // endregion
    // region readTableEntries
    doAnswer(x -> {
        String tableName = x.getArgument(0);
        int limit = x.getArgument(1);
        HashTableIteratorItem.State state = x.getArgument(2);
        final WireCommandType type = WireCommandType.READ_TABLE;
        return CompletableFuture.supplyAsync(() -> {
            synchronized (lock) {
                Map<ByteBuffer, TableSegmentEntry> table = mapOfTables.get(tableName);
                Map<ByteBuffer, Long> tablePos = mapOfTablesPosition.get(tableName);
                if (table == null) {
                    throw new WireCommandFailedException(type, WireCommandFailedException.Reason.SegmentDoesNotExist);
                } else {
                    long floor;
                    if (state.equals(HashTableIteratorItem.State.EMPTY)) {
                        floor = 0L;
                    } else {
                        floor = new ByteArraySegment(state.toBytes()).getLong(0);
                    }
                    AtomicLong token = new AtomicLong(floor);
                    List<TableSegmentEntry> list = tablePos.entrySet().stream()
                            .sorted(Comparator.comparingLong(Map.Entry::getValue))
                            .filter(c -> c.getValue() > floor)
                            .map(r -> {
                                token.set(r.getValue());
                                return duplicate(table.get(r.getKey()));
                            })
                            .limit(limit)
                            .collect(Collectors.toList());
                    byte[] continuationToken = new byte[Long.BYTES];
                    BitConverter.writeLong(continuationToken, 0, token.get());
                    HashTableIteratorItem.State newState = HashTableIteratorItem.State.fromBytes(Unpooled.wrappedBuffer(continuationToken));
                    return new HashTableIteratorItem<>(newState, list);
                }
            }
        }, executor);
    }).when(helper).readTableEntries(anyString(), anyInt(), any(), anyString(), anyLong());
    // endregion
    return helper;
}
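The readTableKeys and readTableEntries stubs above track the iterator position as a plain 8-byte continuation token. Below is a minimal standalone sketch of that round trip, using only the calls the mock itself relies on (BitConverter.writeLong to produce the token, ByteArraySegment.getLong to read it back); the class name and sample value are illustrative, not part of the mock:

import io.pravega.common.util.BitConverter;
import io.pravega.common.util.ByteArraySegment;

public final class ContinuationTokenDemo {
    public static void main(String[] args) {
        long position = System.nanoTime();           // the per-key position tracked in mapOfTablesPosition
        byte[] token = new byte[Long.BYTES];
        BitConverter.writeLong(token, 0, position);  // serialize the position into the continuation token
        long floor = new ByteArraySegment(token).getLong(0); // what the mock does with state.toBytes()
        System.out.println(position == floor);       // true: the token round-trips losslessly
    }
}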
Use of io.pravega.common.util.ByteArraySegment in project pravega by pravega.
The class PravegaTablesStreamMetadataStoreTest, method testPartiallyCreatedScope:
@Test
public void testPartiallyCreatedScope() {
    PravegaTablesStreamMetadataStore store = (PravegaTablesStreamMetadataStore) this.store;
    PravegaTablesStoreHelper storeHelper = store.getStoreHelper();
    String newScope = "newScope";
    Controller.CreateScopeStatus status = store.createScope(newScope, null, executor).join();
    assertEquals(Controller.CreateScopeStatus.Status.SUCCESS, status.getStatus());
    status = store.createScope(newScope, null, executor).join();
    assertEquals(Controller.CreateScopeStatus.Status.SCOPE_EXISTS, status.getStatus());
    // Now partially create a scope.
    String scopeName = "partial";
    byte[] idBytes = new byte[2 * Long.BYTES];
    UUID id = UUID.randomUUID();
    BitConverter.writeUUID(new ByteArraySegment(idBytes), id);
    // Add an entry for the scope in the scopes table only.
    storeHelper.addNewEntry(PravegaTablesStreamMetadataStore.SCOPES_TABLE, scopeName, idBytes, x -> x, 0L).join();
    // Verify that the per-scope tables (streams, KVTs, reader groups) do not exist yet.
    OperationContext context = store.createScopeContext(scopeName, 0L);
    PravegaTablesScope scope = (PravegaTablesScope) store.getScope(scopeName, context);
    ByteBuf token = Unpooled.wrappedBuffer(Base64.getDecoder().decode(""));
    Supplier<CompletableFuture<Map.Entry<ByteBuf, List<String>>>> tableCheckSupplier = () -> scope.getStreamsInScopeTableName(context).thenCompose(tableName -> storeHelper.getKeysPaginated(tableName, token, 10, 0L));
    AssertExtensions.assertFutureThrows("Table should not exist", tableCheckSupplier.get(), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
    Supplier<CompletableFuture<Map.Entry<ByteBuf, List<String>>>> kvttableCheckSupplier = () -> scope.getKVTablesInScopeTableName(context).thenCompose(tableName -> storeHelper.getKeysPaginated(tableName, token, 10, 0L));
    AssertExtensions.assertFutureThrows("Table should not exist", kvttableCheckSupplier.get(), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
    Supplier<CompletableFuture<?>> rgTableCheckSupplier = () -> scope.getReaderGroupsInScopeTableName(context).thenCompose(tableName -> storeHelper.getKeysPaginated(tableName, token, 10, 0L));
    AssertExtensions.assertFutureThrows("RG Table should not exist", rgTableCheckSupplier.get(), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
    assertEquals(Collections.emptyList(), scope.listStreamsInScope(context).join());
    Pair<List<String>, String> listStreams = scope.listStreams(10, "", executor, context).join();
    assertEquals(Collections.emptyList(), listStreams.getKey());
    assertTrue(Strings.isNullOrEmpty(listStreams.getValue()));
    Pair<List<String>, String> listKvts = scope.listKeyValueTables(10, "", executor, context).join();
    assertEquals(Collections.emptyList(), listKvts.getKey());
    assertTrue(Strings.isNullOrEmpty(listKvts.getValue()));
    scope.addStreamToScope("stream", context).join();
    assertEquals("stream", scope.listStreamsInScope(context).join().get(0));
    assertTrue(Futures.await(tableCheckSupplier.get()));
    UUID rgId = UUID.randomUUID();
    String rgName = "rg1";
    scope.addReaderGroupToScope(rgName, rgId, context).join();
    assertEquals(rgId, scope.getReaderGroupId(rgName, context).join());
    assertTrue(Futures.await(rgTableCheckSupplier.get()));
    scope.addKVTableToScope("kvt", UUID.randomUUID(), context).join();
    assertEquals("kvt", scope.listKeyValueTables(10, "", executor, context).join().getKey().get(0));
    assertTrue(Futures.await(kvttableCheckSupplier.get()));
    // Creating the partially created scope again is idempotent: it reports SCOPE_EXISTS.
    status = store.createScope(scopeName, null, executor).join();
    assertEquals(Controller.CreateScopeStatus.Status.SCOPE_EXISTS, status.getStatus());
    PravegaTablesStoreHelper spy = spy(storeHelper);
    PravegaTablesScope scopeObj = new PravegaTablesScope("thirdScope", spy);
    StoreException unknown = StoreException.create(StoreException.Type.UNKNOWN, "unknown");
    doReturn(Futures.failedFuture(unknown)).when(spy).addNewEntry(anyString(), anyString(), any(), any(), anyLong());
    AssertExtensions.assertFutureThrows("Create scope should have thrown exception", scopeObj.createScope(context), e -> Exceptions.unwrap(e).equals(unknown));
}
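The partially created scope above is simulated by serializing the scope id directly and writing it into the scopes table. Below is a small sketch of that id encoding in isolation; the read-back via getLong assumes writeUUID stores the most-significant long at offset 0 and the least-significant long at offset Long.BYTES, which matches the 2 * Long.BYTES sizing used in the test but is an assumption, not something this snippet guarantees:

import java.util.UUID;
import io.pravega.common.util.BitConverter;
import io.pravega.common.util.ByteArraySegment;

public final class ScopeIdEncodingDemo {
    public static void main(String[] args) {
        byte[] idBytes = new byte[2 * Long.BYTES];   // a UUID is two longs, as in the test
        UUID id = UUID.randomUUID();
        BitConverter.writeUUID(new ByteArraySegment(idBytes), id);
        // Assumed layout: most-significant long first, least-significant long second.
        ByteArraySegment segment = new ByteArraySegment(idBytes);
        UUID decoded = new UUID(segment.getLong(0), segment.getLong(Long.BYTES));
        System.out.println(id.equals(decoded));
    }
}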
Use of io.pravega.common.util.ByteArraySegment in project pravega by pravega.
The class SegmentAttributeBTreeIndex, method serializeValue:
private ByteArraySegment serializeValue(Long value) {
    if (value == null || value == Attributes.NULL_ATTRIBUTE_VALUE) {
        // Deletion.
        return null;
    }
    ByteArraySegment result = new ByteArraySegment(new byte[VALUE_LENGTH]);
    result.setLong(0, value);
    return result;
}
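serializeValue writes the attribute value as a single long at offset 0 of a VALUE_LENGTH buffer. The matching read is not shown in this listing; the following is a hypothetical sketch of what it presumably looks like, assuming VALUE_LENGTH is Long.BYTES and that a missing segment maps back to a deleted attribute. The real deserializeValue used by get() below may differ:

// Hypothetical counterpart to serializeValue; shown only to illustrate the ByteArraySegment read path.
private Long deserializeValue(ByteArraySegment value) {
    if (value == null) {
        // No value stored for this key (deleted or never set).
        return null;
    }
    return value.getLong(0);
}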
Use of io.pravega.common.util.ByteArraySegment in project pravega by pravega.
The class PravegaRequestProcessorTest, method testGetTableEntries:
@Test
public void testGetTableEntries() throws Exception {
    // Set up a PravegaRequestProcessor instance to execute requests against.
    val rnd = new Random(0);
    String tableSegmentName = "testGetTableEntries";
    @Cleanup ServiceBuilder serviceBuilder = newInlineExecutionInMemoryBuilder(getBuilderConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    ServerConnection connection = mock(ServerConnection.class);
    InOrder order = inOrder(connection);
    val recorderMock = mock(TableSegmentStatsRecorder.class);
    PravegaRequestProcessor processor = new PravegaRequestProcessor(store, tableStore, new TrackedConnection(connection), SegmentStatsRecorder.noOp(), recorderMock, new PassingTokenVerifier(), false);
    // Generate keys.
    ArrayList<ArrayView> keys = generateKeys(3, rnd);
    ArrayView testValue = generateValue(rnd);
    TableEntry e1 = TableEntry.unversioned(keys.get(0), testValue);
    TableEntry e2 = TableEntry.unversioned(keys.get(1), testValue);
    TableEntry e3 = TableEntry.unversioned(keys.get(2), testValue);
    // Create a table segment and add data.
    processor.createTableSegment(new WireCommands.CreateTableSegment(1, tableSegmentName, false, 0, "", 0));
    order.verify(connection).send(new WireCommands.SegmentCreated(1, tableSegmentName));
    verify(recorderMock).createTableSegment(eq(tableSegmentName), any());
    processor.updateTableEntries(new WireCommands.UpdateTableEntries(2, tableSegmentName, "", getTableEntries(asList(e1, e2, e3)), WireCommands.NULL_TABLE_SEGMENT_OFFSET));
    verify(recorderMock).updateEntries(eq(tableSegmentName), eq(3), eq(false), any());
    // 1. Read the table entries with suggestedEntryCount equal to the number of entries in the table store.
    WireCommands.TableIteratorArgs args = new WireCommands.TableIteratorArgs(Unpooled.EMPTY_BUFFER, Unpooled.EMPTY_BUFFER, Unpooled.EMPTY_BUFFER, Unpooled.EMPTY_BUFFER);
    processor.readTableEntries(new WireCommands.ReadTableEntries(3, tableSegmentName, "", 3, args));
    // Capture the WireCommands sent.
    ArgumentCaptor<WireCommand> wireCommandsCaptor = ArgumentCaptor.forClass(WireCommand.class);
    order.verify(connection, times(2)).send(wireCommandsCaptor.capture());
    verify(recorderMock).iterateEntries(eq(tableSegmentName), eq(3), any());
    // Verify the WireCommands.
    List<Long> keyVersions = ((WireCommands.TableEntriesUpdated) wireCommandsCaptor.getAllValues().get(0)).getUpdatedVersions();
    WireCommands.TableEntriesRead getTableEntriesIteratorsResp = (WireCommands.TableEntriesRead) wireCommandsCaptor.getAllValues().get(1);
    assertTrue(getTableEntriesIteratorsResp.getEntries().getEntries().stream().map(e -> e.getKey().getKeyVersion()).collect(Collectors.toList()).containsAll(keyVersions));
    // Verify that the value is correct.
    assertTrue(getTableEntriesIteratorsResp.getEntries().getEntries().stream().allMatch(e -> {
        ByteBuf buf = e.getValue().getData();
        byte[] bytes = new byte[buf.readableBytes()];
        buf.getBytes(buf.readerIndex(), bytes);
        return testValue.equals(new ByteArraySegment(bytes));
    }));
    // 2. Read the table entries with suggestedEntryCount less than the number of entries in the table store.
    processor.readTableEntries(new WireCommands.ReadTableEntries(3, tableSegmentName, "", 1, args));
    // Capture the WireCommands sent.
    ArgumentCaptor<WireCommands.TableEntriesRead> tableEntriesCaptor = ArgumentCaptor.forClass(WireCommands.TableEntriesRead.class);
    order.verify(connection, times(1)).send(tableEntriesCaptor.capture());
    // Verify the WireCommands.
    getTableEntriesIteratorsResp = tableEntriesCaptor.getAllValues().get(0);
    assertEquals(1, getTableEntriesIteratorsResp.getEntries().getEntries().size());
    assertTrue(keyVersions.contains(getTableEntriesIteratorsResp.getEntries().getEntries().get(0).getKey().getKeyVersion()));
    // Get the last state (continuation token).
    ByteBuf state = getTableEntriesIteratorsResp.getContinuationToken();
    args = new WireCommands.TableIteratorArgs(state, Unpooled.EMPTY_BUFFER, Unpooled.EMPTY_BUFFER, Unpooled.EMPTY_BUFFER);
    // 3. Read the remaining table entries by providing a higher suggestedEntryCount and the continuation state to the iterator.
    processor.readTableEntries(new WireCommands.ReadTableEntries(3, tableSegmentName, "", 3, args));
    // Capture the WireCommands sent.
    tableEntriesCaptor = ArgumentCaptor.forClass(WireCommands.TableEntriesRead.class);
    order.verify(connection, times(1)).send(tableEntriesCaptor.capture());
    verify(recorderMock).iterateEntries(eq(tableSegmentName), eq(1), any());
    // Verify the WireCommands.
    getTableEntriesIteratorsResp = tableEntriesCaptor.getAllValues().get(0);
    assertEquals(2, getTableEntriesIteratorsResp.getEntries().getEntries().size());
    assertTrue(keyVersions.containsAll(getTableEntriesIteratorsResp.getEntries().getEntries().stream().map(e -> e.getKey().getKeyVersion()).collect(Collectors.toList())));
}
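The value check in this test copies the readable bytes of the Netty ByteBuf into a byte[] and wraps them in a ByteArraySegment so they can be compared against the ArrayView test value. A minimal standalone sketch of that conversion (the class and helper names are illustrative, not from the test):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.pravega.common.util.ByteArraySegment;

public final class ByteBufToSegmentDemo {
    // Copy the readable bytes of a ByteBuf into a ByteArraySegment without moving the reader index.
    static ByteArraySegment toSegment(ByteBuf buf) {
        byte[] bytes = new byte[buf.readableBytes()];
        buf.getBytes(buf.readerIndex(), bytes);
        return new ByteArraySegment(bytes);
    }

    public static void main(String[] args) {
        ByteBuf buf = Unpooled.wrappedBuffer(new byte[]{1, 2, 3});
        System.out.println(toSegment(buf).getLength()); // 3
    }
}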
Use of io.pravega.common.util.ByteArraySegment in project pravega by pravega.
The class SegmentAttributeBTreeIndex, method get:
@Override
public CompletableFuture<Map<AttributeId, Long>> get(@NonNull Collection<AttributeId> keys, @NonNull Duration timeout) {
    ensureInitialized();
    if (keys.isEmpty()) {
        // Nothing to do.
        return CompletableFuture.completedFuture(Collections.emptyMap());
    }
    // Keep two lists, one of keys (in some order) and one of serialized keys (in the same order).
    val keyList = new ArrayList<AttributeId>(keys.size());
    val serializedKeys = new ArrayList<ByteArraySegment>(keys.size());
    for (AttributeId key : keys) {
        keyList.add(key);
        serializedKeys.add(this.keySerializer.serialize(key));
    }
    // Retry the read to protect against transient failures on the underlying segment file (see READ_RETRY Javadoc).
    return READ_RETRY.runAsync(() -> this.index.get(serializedKeys, timeout), this.executor).thenApply(entries -> {
        assert entries.size() == keys.size() : "Unexpected number of entries returned by the index search.";
        // The index search result is a list of values in the same order as the keys we passed in, so we need
        // to use the list index to match them.
        Map<AttributeId, Long> result = new HashMap<>();
        for (int i = 0; i < keyList.size(); i++) {
            ByteArraySegment v = entries.get(i);
            if (v != null) {
                // BTreeIndex will return null if a key is not present; however we exclude that from our result.
                result.put(keyList.get(i), deserializeValue(v));
            }
        }
        return result;
    }).exceptionally(this::handleIndexOperationException);
}