Usage example of org.hypertrace.core.documentstore.Collection from the hypertrace/document-store project: class MongoDocStoreTest, method testReturnAndBulkUpsert.
@Test
public void testReturnAndBulkUpsert() throws IOException {
  datastore.createCollection(COLLECTION_NAME, null);
  Collection collection = datastore.getCollection(COLLECTION_NAME);

  Map<Key, Document> initialDocs =
      Map.of(
          new SingleValueKey("default", "testKey1"),
          Utils.createDocument("id", "1", "testKey1", "abc-v1"),
          new SingleValueKey("default", "testKey2"),
          Utils.createDocument("id", "2", "testKey2", "xyz-v1"));

  // The collection starts empty, so the first bulk upsert returns no prior documents.
  Iterator<Document> previousDocs = collection.bulkUpsertAndReturnOlderDocuments(initialDocs);
  Assertions.assertFalse(previousDocs.hasNext());

  // Upsert new versions of the same keys; the older (v1) copies should be returned.
  Map<Key, Document> updatedDocs =
      Map.of(
          new SingleValueKey("default", "testKey1"),
          Utils.createDocument("id", "1", "testKey1", "abc-v2"),
          new SingleValueKey("default", "testKey2"),
          Utils.createDocument("id", "2", "testKey2", "xyz-v2"));
  previousDocs = collection.bulkUpsertAndReturnOlderDocuments(updatedDocs);
  assertEquals(2, collection.count());

  List<Document> returned = new ArrayList<>();
  previousDocs.forEachRemaining(returned::add);
  assertEquals(2, returned.size());

  Map<String, JsonNode> expectedById = convertToMap(initialDocs.values(), "id");
  Map<String, JsonNode> actualById = convertToMap(returned, "id");

  // Each returned document must be the older copy: the JSON diff from the expected
  // v1 document to the returned one may contain only "add" operations (extra
  // store-managed fields) and no removals or replacements.
  expectedById.forEach(
      (id, expected) -> {
        JsonNode actual = actualById.get(id);
        Assertions.assertNotNull(actual);
        JsonNode patch = JsonDiff.asJson(expected, actual);
        Set<String> ops = new HashSet<>();
        patch
            .elements()
            .forEachRemaining(
                op -> {
                  if (op.has("op")) {
                    ops.add(op.get("op").asText());
                  }
                });
        Assertions.assertTrue(ops.contains("add"));
        Assertions.assertEquals(1, ops.size());
      });

  // Deleting one of the documents should reduce the count accordingly.
  collection.delete(new SingleValueKey("default", "testKey1"));
  assertEquals(1, collection.count());
}
Usage example of org.hypertrace.core.documentstore.Collection from the hypertrace/document-store project: class MongoDocStoreTest, method test_bulkOperationOnArrayValue_setOperation.
@Test
// Verifies that a bulk SET operation on the array sub-document
// "attributes.labels.valueList.values" replaces the target array with the supplied
// sub-documents (Label2, Label3) for every requested key, regardless of the array's
// prior contents (one label, a different label, absent, or a superset).
public void test_bulkOperationOnArrayValue_setOperation() throws Exception {
datastore.createCollection(COLLECTION_NAME, null);
Collection collection = datastore.getCollection(COLLECTION_NAME);
// key1: starts with a single label "Label1"; SET should replace it with Label2/Label3.
Key key1 = new SingleValueKey("default", "testKey1");
Document key1InsertedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey1"), ImmutablePair.of("attributes", Map.of("name", "testKey1", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label1"))))))));
Document key1ExpectedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey1"), ImmutablePair.of("attributes", Map.of("name", "testKey1", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label2")), ImmutablePair.of("value", Map.of("string", "Label3"))))))));
collection.upsert(key1, key1InsertedDocument);
// key2: starts with "Label2" (already one of the new values); result is still the full SET.
Key key2 = new SingleValueKey("default", "testKey2");
Document key2InsertedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey2"), ImmutablePair.of("attributes", Map.of("name", "testKey2", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label2"))))))));
Document key2ExpectedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey2"), ImmutablePair.of("attributes", Map.of("name", "testKey2", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label2")), ImmutablePair.of("value", Map.of("string", "Label3"))))))));
collection.upsert(key2, key2InsertedDocument);
// key3: has no labels array at all; SET should create it with the new values.
Key key3 = new SingleValueKey("default", "testKey3");
Document key3InsertedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey3"), ImmutablePair.of("attributes", Map.of("name", "testKey3")));
Document key3ExpectedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey3"), ImmutablePair.of("attributes", Map.of("name", "testKey3", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label2")), ImmutablePair.of("value", Map.of("string", "Label3"))))))));
collection.upsert(key3, key3InsertedDocument);
// key4: starts with a superset (Label1..3); SET should shrink it to exactly Label2/Label3.
Key key4 = new SingleValueKey("default", "testKey4");
Document key4InsertedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey4"), ImmutablePair.of("attributes", Map.of("name", "testKey4", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label1")), ImmutablePair.of("value", Map.of("string", "Label2")), ImmutablePair.of("value", Map.of("string", "Label3"))))))));
Document key4ExpectedDocument = Utils.createDocument(ImmutablePair.of("id", "testKey4"), ImmutablePair.of("attributes", Map.of("name", "testKey4", "labels", ImmutablePair.of("valueList", ImmutablePair.of("values", List.of(ImmutablePair.of("value", Map.of("string", "Label2")), ImmutablePair.of("value", Map.of("string", "Label3"))))))));
collection.upsert(key4, key4InsertedDocument);
// The replacement payload applied to all four keys in a single bulk request.
Document label2Document = Utils.createDocument(ImmutablePair.of("value", Map.of("string", "Label2")));
Document label3Document = Utils.createDocument(ImmutablePair.of("value", Map.of("string", "Label3")));
List<Document> subDocuments = List.of(label2Document, label3Document);
BulkArrayValueUpdateRequest bulkArrayValueUpdateRequest = new BulkArrayValueUpdateRequest(Set.of(key1, key2, key3, key4), "attributes.labels.valueList.values", SET, subDocuments);
BulkUpdateResult bulkUpdateResult = collection.bulkOperationOnArrayValue(bulkArrayValueUpdateRequest);
// All four documents must report as updated.
assertEquals(4, bulkUpdateResult.getUpdatedCount());
// get all documents
Query query = new Query();
Iterator<Document> results = collection.search(query);
List<Document> documents = new ArrayList<>();
while (results.hasNext()) {
documents.add(results.next());
}
assertEquals(4, documents.size());
Map<String, JsonNode> actualDocs = convertToMap(documents, "id");
Map<String, JsonNode> expectedDocs = convertToMap(List.of(key1ExpectedDocument, key2ExpectedDocument, key3ExpectedDocument, key4ExpectedDocument), "id");
// Verify that the documents returned are as expected
// (only the "attributes" subtree is compared, so store-managed top-level
// bookkeeping fields do not affect the assertion).
for (Map.Entry<String, JsonNode> entry : actualDocs.entrySet()) {
String key = entry.getKey();
JsonNode attributesJsonNode = entry.getValue().get("attributes");
JsonNode expectedAttributesJsonNode = expectedDocs.get(key).get("attributes");
assertEquals(expectedAttributesJsonNode, attributesJsonNode);
}
}
Usage example of org.hypertrace.core.documentstore.Collection from the hypertrace/document-store project: class MongoDocStoreTest, method testBulkUpsertAndVerifyUpdatedTime.
@Test
public void testBulkUpsertAndVerifyUpdatedTime() throws IOException {
  Collection collection = datastore.getCollection(COLLECTION_NAME);

  ObjectNode payload = OBJECT_MAPPER.createObjectNode();
  payload.put("foo1", "bar1");
  Document document = new JSONDocument(payload);
  collection.bulkUpsert(Map.of(new SingleValueKey("default", "testKey"), document));

  Query byIdQuery = new Query();
  byIdQuery.setFilter(Filter.eq("_id", "default:testKey"));

  List<Document> firstFetch = new ArrayList<>();
  collection.search(byIdQuery).forEachRemaining(firstFetch::add);
  Assertions.assertFalse(firstFetch.isEmpty());

  String firstJson = firstFetch.get(0).toJson();
  // The store must stamp all three bookkeeping fields on the persisted document.
  Assertions.assertTrue(firstJson.contains(LAST_UPDATE_TIME_KEY));
  Assertions.assertTrue(firstJson.contains(LAST_UPDATED_TIME_KEY));
  Assertions.assertTrue(firstJson.contains(LAST_CREATED_TIME_KEY));

  JsonNode firstNode = OBJECT_MAPPER.readTree(firstJson);
  String initialLastUpdateTime = firstNode.findValue(LAST_UPDATE_TIME_KEY).findValue("$date").asText();
  long initialUpdatedTime = firstNode.findValue(LAST_UPDATED_TIME_KEY).asLong();
  long initialCreatedTime = firstNode.findValue(LAST_CREATED_TIME_KEY).asLong();

  // Upsert the same key again: creation time must be preserved while both
  // last-update fields must move forward.
  collection.bulkUpsert(Map.of(new SingleValueKey("default", "testKey"), document));

  List<Document> secondFetch = new ArrayList<>();
  collection.search(byIdQuery).forEachRemaining(secondFetch::add);
  Assertions.assertFalse(secondFetch.isEmpty());

  JsonNode secondNode = OBJECT_MAPPER.readTree(secondFetch.get(0).toJson());
  String refreshedLastUpdateTime = secondNode.findValue(LAST_UPDATE_TIME_KEY).findValue("$date").asText();
  long refreshedUpdatedTime = secondNode.findValue(LAST_UPDATED_TIME_KEY).asLong();
  long refreshedCreatedTime = secondNode.findValue(LAST_CREATED_TIME_KEY).asLong();

  Assertions.assertEquals(initialCreatedTime, refreshedCreatedTime);
  Assertions.assertFalse(refreshedLastUpdateTime.equalsIgnoreCase(initialLastUpdateTime));
  Assertions.assertNotEquals(refreshedUpdatedTime, initialUpdatedTime);
}
Usage example of org.hypertrace.core.documentstore.Collection from the hypertrace/document-store project: class PostgresDocStoreTest, method testBulkUpsertAndReturn.
@Test
public void testBulkUpsertAndReturn() throws IOException {
  Collection collection = datastore.getCollection(COLLECTION_NAME);

  Map<Key, Document> documentsByKey = new HashMap<>();
  documentsByKey.put(new SingleValueKey("default", "testKey1"), Utils.createDocument("name", "Bob"));
  documentsByKey.put(new SingleValueKey("default", "testKey2"), Utils.createDocument("name", "Alice"));
  documentsByKey.put(new SingleValueKey("default", "testKey3"), Utils.createDocument("name", "Alice"));
  documentsByKey.put(new SingleValueKey("default", "testKey4"), Utils.createDocument("name", "Bob"));
  documentsByKey.put(new SingleValueKey("default", "testKey5"), Utils.createDocument("name", "Alice"));
  documentsByKey.put(new SingleValueKey("default", "testKey6"), Utils.createDocument("email", "bob@example.com"));

  // First upsert into an empty collection: nothing older to return.
  Iterator<Document> olderDocs = collection.bulkUpsertAndReturnOlderDocuments(documentsByKey);
  Assertions.assertFalse(olderDocs.hasNext());

  // Repeating the identical upsert is idempotent and now returns all six prior copies.
  olderDocs = collection.bulkUpsertAndReturnOlderDocuments(documentsByKey);
  List<Document> returned = new ArrayList<>();
  olderDocs.forEachRemaining(returned::add);
  Assertions.assertEquals(6, returned.size());

  {
    // empty query returns all the documents
    Query query = new Query();
    Assertions.assertEquals(6, collection.total(query));
  }
  {
    // A filter restricts the total to the matching documents only.
    Query query = new Query();
    query.setFilter(Filter.eq("name", "Bob"));
    Assertions.assertEquals(2, collection.total(query));
  }
  {
    // limit should not affect the total
    Query query = new Query();
    query.setFilter(Filter.eq("name", "Bob"));
    query.setLimit(1);
    Assertions.assertEquals(2, collection.total(query));
  }
}
Usage example of org.hypertrace.core.documentstore.Collection from the hypertrace/entity-service project: class EntityDataServiceImpl, method searchByIdAndStreamSingleResponse.
// Looks up a single entity document by its normalized doc key and streams exactly one
// response to the observer: the parsed entity when one match exists, the builder's
// default instance when none exists, or an error when the id unexpectedly matches
// multiple documents.
private <T extends Message> void searchByIdAndStreamSingleResponse(String tenantId, String entityId, String entityType, Collection collection, Message.Builder builder, StreamObserver<T> responseObserver) {
org.hypertrace.core.documentstore.Query query = new org.hypertrace.core.documentstore.Query();
// The doc key encodes tenant + type + id, so the EQ filter on ID is tenant-scoped.
String docId = this.entityNormalizer.getEntityDocKey(tenantId, entityType, entityId).toString();
query.setFilter(new Filter(Filter.Op.EQ, EntityServiceConstants.ID, docId));
Iterator<Document> result = collection.search(query);
List<T> entities = new ArrayList<>();
while (result.hasNext()) {
// Each document is parsed on a clone of the builder so parse state never leaks
// between iterations; unparseable documents are logged and skipped by parseOrLog.
PARSER.<T>parseOrLog(result.next(), builder.clone()).map(entity -> {
// Populate the tenant id field with the tenant id that's received for backward
// compatibility.
Descriptors.FieldDescriptor fieldDescriptor = entity.getDescriptorForType().findFieldByName("tenant_id");
if (fieldDescriptor != null) {
// NOTE(review): unchecked cast — assumes the builder produces messages of type T;
// confirm this holds at all call sites.
return (T) entity.toBuilder().setField(fieldDescriptor, tenantId).build();
}
return entity;
}).ifPresent(entities::add);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Docstore query has returned the result: {}", entities);
}
if (entities.size() == 1) {
responseObserver.onNext(entities.get(0));
responseObserver.onCompleted();
} else if (entities.size() > 1) {
// Doc keys are expected to be unique per entity; more than one match is a data error.
responseObserver.onError(new IllegalStateException("Multiple entities with same id are found."));
} else {
// When there is no result, we should return the default instance, which is a way
// of saying it's null.
// TODO : Not convinced with the default instance
responseObserver.onNext((T) builder.build());
responseObserver.onCompleted();
}
}
Aggregations