Use of org.hypertrace.core.documentstore.Key in project document-store by hypertrace.
From the class PostgresDocStoreTest, method testBulkUpsertAndReturn.
@Test
public void testBulkUpsertAndReturn() throws IOException {
  Collection collection = datastore.getCollection(COLLECTION_NAME);
  Map<Key, Document> bulkMap = new HashMap<>();
  bulkMap.put(new SingleValueKey("default", "testKey1"), Utils.createDocument("name", "Bob"));
  bulkMap.put(new SingleValueKey("default", "testKey2"), Utils.createDocument("name", "Alice"));
  bulkMap.put(new SingleValueKey("default", "testKey3"), Utils.createDocument("name", "Alice"));
  bulkMap.put(new SingleValueKey("default", "testKey4"), Utils.createDocument("name", "Bob"));
  bulkMap.put(new SingleValueKey("default", "testKey5"), Utils.createDocument("name", "Alice"));
  bulkMap.put(
      new SingleValueKey("default", "testKey6"), Utils.createDocument("email", "bob@example.com"));

  Iterator<Document> iterator = collection.bulkUpsertAndReturnOlderDocuments(bulkMap);
  // Initially there shouldn't be any documents.
  Assertions.assertFalse(iterator.hasNext());

  // The operation should be idempotent, so go ahead and try again.
  iterator = collection.bulkUpsertAndReturnOlderDocuments(bulkMap);
  List<Document> documents = new ArrayList<>();
  while (iterator.hasNext()) {
    documents.add(iterator.next());
  }
  Assertions.assertEquals(6, documents.size());

  {
    // empty query returns all the documents
    Query query = new Query();
    Assertions.assertEquals(6, collection.total(query));
  }
  {
    Query query = new Query();
    query.setFilter(Filter.eq("name", "Bob"));
    Assertions.assertEquals(2, collection.total(query));
  }
  {
    // limit should not affect the total
    Query query = new Query();
    query.setFilter(Filter.eq("name", "Bob"));
    query.setLimit(1);
    Assertions.assertEquals(2, collection.total(query));
  }
}
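The first upsert returns no older documents because the collection starts empty; the second returns the six documents written by the first. As a follow-up to the totals checked above, here is a small additional sketch (not part of the original test) using the same Query and Filter API against the email field written for testKey6; the expected count of 1 follows from the contents of bulkMap:

// Only testKey6 was written with an email field, so the total should be 1.
Query query = new Query();
query.setFilter(Filter.eq("email", "bob@example.com"));
Assertions.assertEquals(1, collection.total(query));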
Use of org.hypertrace.core.documentstore.Key in project document-store by hypertrace.
From the class PostgresCollection, method bulkUpsertAndReturnOlderDocuments.
@Override
public CloseableIterator<Document> bulkUpsertAndReturnOlderDocuments(Map<Key, Document> documents)
    throws IOException {
  String query = null;
  try {
    String collect =
        documents.keySet().stream()
            .map(val -> "'" + val.toString() + "'")
            .collect(Collectors.joining(", "));
    String space = " ";
    query =
        new StringBuilder("SELECT * FROM").append(space).append(collectionName)
            .append(" WHERE ").append(ID).append(" IN ").append("(").append(collect).append(")")
            .toString();
    PreparedStatement preparedStatement = client.prepareStatement(query);
    ResultSet resultSet = preparedStatement.executeQuery();

    // Now go ahead and bulk upsert the documents.
    int[] updateCounts = bulkUpsertImpl(documents);
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug("Write result: {}", Arrays.toString(updateCounts));
    }
    return new PostgresResultIterator(resultSet);
  } catch (IOException e) {
    LOGGER.error("SQLException bulk inserting documents. documents: {}", documents, e);
  } catch (SQLException e) {
    LOGGER.error("SQLException querying documents. query: {}", query, e);
  }
  throw new IOException("Could not bulk upsert the documents.");
}
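The method first snapshots the matching rows with a SELECT built from each Key's string form, then performs the bulk upsert, and finally returns an iterator over the pre-upsert rows. The key strings are concatenated directly into the SQL text; an alternative sketch of the same lookup using bound parameters is shown below. This is plain JDBC reusing the same client, collectionName, and ID fields, placed inside the same try/catch as above; it is illustrative, not the project's implementation:

String placeholders =
    documents.keySet().stream().map(key -> "?").collect(Collectors.joining(", "));
String sql = "SELECT * FROM " + collectionName + " WHERE " + ID + " IN (" + placeholders + ")";
PreparedStatement preparedStatement = client.prepareStatement(sql);
int index = 1;
for (Key key : documents.keySet()) {
  // Bind each key's string form instead of concatenating it into the query text.
  preparedStatement.setString(index++, key.toString());
}
ResultSet resultSet = preparedStatement.executeQuery();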
Use of org.hypertrace.core.documentstore.Key in project entity-service by hypertrace.
From the class EntityQueryServiceImpl, method doBulkUpdate.
private void doBulkUpdate(RequestContext requestContext, Map<String, EntityUpdateInfo> entitiesMap)
    throws Exception {
  Map<Key, Map<String, Document>> entitiesUpdateMap = new HashMap<>();
  for (String entityId : entitiesMap.keySet()) {
    Map<String, Document> transformedUpdateOperations =
        transformUpdateOperations(
            entitiesMap.get(entityId).getUpdateOperationList(), requestContext);
    if (transformedUpdateOperations.isEmpty()) {
      continue;
    }
    entitiesUpdateMap.put(
        new SingleValueKey(requestContext.getTenantId().orElseThrow(), entityId),
        transformedUpdateOperations);
  }

  if (entitiesUpdateMap.isEmpty()) {
    LOG.error("There are no entities to update!");
    return;
  }

  try {
    entitiesCollection.bulkUpdateSubDocs(entitiesUpdateMap);
  } catch (Exception e) {
    LOG.error("Failed to update entities {}", entitiesMap, e);
    throw e;
  }
}
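The structure handed to bulkUpdateSubDocs is a map of maps: the outer key identifies the entity document, and the inner map goes from a sub-document path to its replacement value. A minimal sketch of that shape for a single entity follows; the "attributes.status" path and the JSON payload are illustrative assumptions (not output of transformUpdateOperations), and JSONDocument is assumed to accept a raw JSON string:

void updateSingleSubDocument(Collection entitiesCollection, String tenantId, String entityId)
    throws Exception {
  Map<Key, Map<String, Document>> updates = new HashMap<>();
  updates.put(
      new SingleValueKey(tenantId, entityId),
      // Hypothetical sub-document path and value, for illustration only.
      Map.of("attributes.status", new JSONDocument("{\"value\":{\"string\":\"ACTIVE\"}}")));
  entitiesCollection.bulkUpdateSubDocs(updates);
}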
Use of org.hypertrace.core.documentstore.Key in project entity-service by hypertrace.
From the class EntityQueryServiceImpl, method bulkUpdateEntityArrayAttribute.
@Override
public void bulkUpdateEntityArrayAttribute(
    BulkEntityArrayAttributeUpdateRequest request,
    StreamObserver<BulkEntityArrayAttributeUpdateResponse> responseObserver) {
  RequestContext requestContext = RequestContext.CURRENT.get();
  String tenantId = requestContext.getTenantId().orElse(null);
  if (isNull(tenantId)) {
    responseObserver.onError(new ServiceException("Tenant id is missing in the request."));
    return;
  }
  try {
    Set<Key> keys =
        request.getEntityIdsList().stream()
            .map(entityId -> new SingleValueKey(tenantId, entityId))
            .collect(Collectors.toCollection(LinkedHashSet::new));
    String attributeId = request.getAttribute().getColumnName();
    String subDocPath =
        entityAttributeMapping
            .getDocStorePathByAttributeId(requestContext, attributeId)
            .orElseThrow(() -> new IllegalArgumentException("Unknown attribute " + attributeId));
    List<Document> subDocuments =
        request.getValuesList().stream()
            .map(this::convertToJsonDocument)
            .collect(toUnmodifiableList());
    BulkArrayValueUpdateRequest bulkArrayValueUpdateRequest =
        new BulkArrayValueUpdateRequest(
            keys,
            subDocPath + ARRAY_VALUE_PATH_SUFFIX,
            getMatchingOperation(request.getOperation()),
            subDocuments);
    entitiesCollection.bulkOperationOnArrayValue(bulkArrayValueUpdateRequest);
    responseObserver.onNext(BulkEntityArrayAttributeUpdateResponse.newBuilder().build());
    responseObserver.onCompleted();
  } catch (Exception e) {
    responseObserver.onError(e);
  }
}
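For reference, a stripped-down sketch of building the same document-store request outside the gRPC handler. The constructor arguments mirror the call above (keys, sub-document path, operation, values); the path literal and the nested BulkArrayValueUpdateRequest.Operation type of the operation parameter are assumptions for illustration, since the real path and operation come from entityAttributeMapping and getMatchingOperation:

void addValuesToArrayAttribute(
    Collection entitiesCollection,
    String tenantId,
    List<String> entityIds,
    BulkArrayValueUpdateRequest.Operation operation,
    List<Document> values)
    throws Exception {
  Set<Key> keys =
      entityIds.stream()
          .map(entityId -> new SingleValueKey(tenantId, entityId))
          .collect(Collectors.toCollection(LinkedHashSet::new));
  // "attributes.labels.valueList.values" is a hypothetical sub-document path.
  BulkArrayValueUpdateRequest request =
      new BulkArrayValueUpdateRequest(
          keys, "attributes.labels.valueList.values", operation, values);
  entitiesCollection.bulkOperationOnArrayValue(request);
}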
Use of org.hypertrace.core.documentstore.Key in project entity-service by hypertrace.
From the class EntityDataServiceImpl, method getAndUpsertEntities.
@Override
public void getAndUpsertEntities(Entities request, StreamObserver<Entity> responseObserver) {
  String tenantId = RequestContext.CURRENT.get().getTenantId().orElse(null);
  if (tenantId == null) {
    responseObserver.onError(new ServiceException("Tenant id is missing in the request."));
    return;
  }
  try {
    Map<Key, Document> documentMap = new HashMap<>();
    List<Entity> updatedEntities = new ArrayList<>();
    for (Entity entity : request.getEntityList()) {
      Entity normalizedEntity = this.entityNormalizer.normalize(tenantId, entity);
      updatedEntities.add(normalizedEntity);
      Document doc = convertEntityToDocument(normalizedEntity);
      Key key = this.entityNormalizer.getEntityDocKey(tenantId, normalizedEntity);
      documentMap.put(key, doc);
    }
    List<Entity> existingEntities =
        Streams.stream(entitiesCollection.bulkUpsertAndReturnOlderDocuments(documentMap))
            .flatMap(document -> PARSER.<Entity>parseOrLog(document, Entity.newBuilder()).stream())
            .map(Entity::toBuilder)
            .map(builder -> builder.setTenantId(tenantId))
            .map(Entity.Builder::build)
            .collect(Collectors.toList());
    existingEntities.forEach(responseObserver::onNext);
    responseObserver.onCompleted();
    entityChangeEventGenerator.sendChangeNotification(
        RequestContext.CURRENT.get(), existingEntities, updatedEntities);
  } catch (IOException e) {
    LOG.error("Failed to bulk upsert entities", e);
    responseObserver.onError(e);
  }
}
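The older documents returned by bulkUpsertAndReturnOlderDocuments are parsed back into Entity protos via the service's PARSER.parseOrLog helper. A hedged sketch of an equivalent round trip with protobuf's standard JsonFormat parser, assuming the stored document JSON matches the Entity message's JSON form:

Entity parseOlderDocument(Document document, String tenantId) throws IOException {
  // Parse the stored JSON back into an Entity and re-attach the tenant id,
  // mirroring what the stream pipeline above does per document.
  Entity.Builder builder = Entity.newBuilder();
  JsonFormat.parser().ignoringUnknownFields().merge(document.toJson(), builder);
  return builder.setTenantId(tenantId).build();
}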