Use of org.springframework.data.mongodb.core.query.Collation in project spring-data-mongodb by spring-projects.
The class MongoTemplate, method doRemove.
protected <T> DeleteResult doRemove(final String collectionName, final Query query, @Nullable final Class<T> entityClass) {
    Assert.notNull(query, "Query must not be null!");
    Assert.hasText(collectionName, "Collection name must not be null or empty!");
    final MongoPersistentEntity<?> entity = getPersistentEntity(entityClass);
    final Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), entity);
    return execute(collectionName, new CollectionCallback<DeleteResult>() {

        public DeleteResult doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
            maybeEmitEvent(new BeforeDeleteEvent<T>(queryObject, entityClass, collectionName));
            Document removeQuery = queryObject;
            DeleteOptions options = new DeleteOptions();
            query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
            MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, null, queryObject);
            WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
            DeleteResult dr = null;
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Remove using query: {} in collection: {}.", new Object[] { serializeToJsonSafely(removeQuery), collectionName });
            }
            if (query.getLimit() > 0 || query.getSkip() > 0) {
                MongoCursor<Document> cursor = new QueryCursorPreparer(query, entityClass)
                        .prepare(collection.find(removeQuery).projection(new Document(ID_FIELD, 1))).iterator();
                Set<Object> ids = new LinkedHashSet<>();
                while (cursor.hasNext()) {
                    ids.add(cursor.next().get(ID_FIELD));
                }
                removeQuery = new Document(ID_FIELD, new Document("$in", ids));
            }
            if (writeConcernToUse == null) {
                dr = collection.deleteMany(removeQuery, options);
            } else {
                dr = collection.withWriteConcern(writeConcernToUse).deleteMany(removeQuery, options);
            }
            maybeEmitEvent(new AfterDeleteEvent<T>(queryObject, entityClass, collectionName));
            return dr;
        }
    });
}
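The collation taken from the Query above is copied onto the DeleteOptions handed to deleteMany. A minimal caller-side sketch, assuming an already configured MongoTemplate named template and a "person" collection with a "lastname" field (both made up for illustration):

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import com.mongodb.client.result.DeleteResult;

// Remove documents whose "lastname" matches case-insensitively:
// collation strength 2 (secondary) ignores case, so "ANNIE" also matches "annie".
DeleteResult removeByLastname(MongoTemplate template) {
    Query query = new Query(Criteria.where("lastname").is("ANNIE"))
            .collation(Collation.of("en").strength(2));
    return template.remove(query, "person");
}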
Use of org.springframework.data.mongodb.core.query.Collation in project cas by apereo.
The class MongoDbWebAuthnCredentialRepository, method update.
@Override
@SneakyThrows
protected void update(final String username, final Collection<CredentialRegistration> givenRecords) {
    val records = givenRecords.stream().map(record -> {
        if (record.getRegistrationTime() == null) {
            return record.withRegistrationTime(Instant.now(Clock.systemUTC()));
        }
        return record;
    }).collect(Collectors.toList());
    val query = new Query(Criteria.where(MongoDbWebAuthnCredentialRegistration.FIELD_USERNAME).is(username))
            .collation(Collation.of(Locale.ENGLISH).strength(Collation.ComparisonLevel.primary()));
    val collection = getProperties().getAuthn().getMfa().getWebAuthn().getMongo().getCollection();
    if (records.isEmpty()) {
        LOGGER.debug("No records are provided for [{}] so entry will be removed", username);
        mongoTemplate.remove(query, MongoDbWebAuthnCredentialRegistration.class, collection);
    } else {
        val jsonRecords = getCipherExecutor().encode(WebAuthnUtils.getObjectMapper().writeValueAsString(records));
        val entry = MongoDbWebAuthnCredentialRegistration.builder().records(jsonRecords).username(username).build();
        val update = Update.update(MongoDbWebAuthnCredentialRegistration.FIELD_RECORDS, jsonRecords);
        val result = mongoTemplate.updateFirst(query, update, collection);
        if (result.getMatchedCount() <= 0) {
            LOGGER.debug("Storing new registration record for [{}]", username);
            mongoTemplate.save(entry, collection);
        }
    }
}
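The primary-strength English collation makes the username lookup case- and accent-insensitive, so "casuser" and "CASUSER" resolve to the same registration entry. A hedged sketch of the same query pattern used for a read, assuming the repository's mongoTemplate field and the configured collection name as in the method above (the helper itself is hypothetical):

// Hypothetical read that reuses the case-insensitive username query.
private MongoDbWebAuthnCredentialRegistration findByUsername(final String username, final String collection) {
    val query = new Query(Criteria.where(MongoDbWebAuthnCredentialRegistration.FIELD_USERNAME).is(username))
            .collation(Collation.of(Locale.ENGLISH).strength(Collation.ComparisonLevel.primary()));
    return mongoTemplate.findOne(query, MongoDbWebAuthnCredentialRegistration.class, collection);
}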
Use of org.springframework.data.mongodb.core.query.Collation in project spring-data-mongodb by spring-projects.
The class MongoTemplate, method doAggregate.
@SuppressWarnings("ConstantConditions")
protected <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName, Class<O> outputType, AggregationOperationContext context) {
    ReadDocumentCallback<O> callback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName);
    AggregationOptions options = aggregation.getOptions();
    AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
    if (options.isExplain()) {
        Document command = aggregationUtil.createCommand(collectionName, aggregation, context);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command)));
        }
        Document commandResult = executeCommand(command);
        return new AggregationResults<>(commandResult.get("results", new ArrayList<Document>(0)).stream()
                .map(callback::doWith).collect(Collectors.toList()), commandResult);
    }
    List<Document> pipeline = aggregationUtil.createPipeline(aggregation, context);
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(String.format("Executing aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName));
    }
    return execute(collectionName, collection -> {
        List<Document> rawResult = new ArrayList<>();
        Class<?> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation<?>) aggregation).getInputType() : null;
        Optional<Collation> collation = Optionals.firstNonEmpty(options::getCollation,
                () -> operations.forType(domainType).getCollation());
        AggregateIterable<Document> aggregateIterable = collection.aggregate(pipeline, Document.class)
                .collation(collation.map(Collation::toMongoCollation).orElse(null))
                .allowDiskUse(options.isAllowDiskUse());
        if (options.getCursorBatchSize() != null) {
            aggregateIterable = aggregateIterable.batchSize(options.getCursorBatchSize());
        }
        options.getComment().ifPresent(aggregateIterable::comment);
        options.getHint().ifPresent(aggregateIterable::hint);
        if (options.hasExecutionTimeLimit()) {
            aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS);
        }
        if (options.isSkipResults()) {
            // toCollection only allowed for $out and $merge if those are the last stages
            if (aggregation.getPipeline().isOutOrMerge()) {
                aggregateIterable.toCollection();
            } else {
                aggregateIterable.first();
            }
            return new AggregationResults<>(Collections.emptyList(), new Document());
        }
        MongoIterable<O> iterable = aggregateIterable.map(val -> {
            rawResult.add(val);
            return callback.doWith(val);
        });
        return new AggregationResults<>(iterable.into(new ArrayList<>()), new Document("results", rawResult).append("ok", 1.0D));
    });
}
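The collation used above comes either from the AggregationOptions or, for a TypedAggregation, from the input type's entity metadata. A minimal caller-side sketch, assuming an existing MongoTemplate named template and a "person" collection (names made up for illustration):

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Collation;

// Group case-insensitively by "lastname": the collation carried by
// AggregationOptions ends up on collection.aggregate(...).collation(...).
AggregationResults<Document> groupByLastname(MongoTemplate template) {
    Aggregation aggregation = Aggregation.newAggregation(
            Aggregation.group("lastname").count().as("count"))
            .withOptions(AggregationOptions.builder()
                    .collation(Collation.of("en").strength(1))
                    .build());
    return template.aggregate(aggregation, "person", Document.class);
}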
Use of org.springframework.data.mongodb.core.query.Collation in project spring-data-mongodb by spring-projects.
The class MongoTemplate, method mapReduce.
/**
 * @param query
 * @param domainType
 * @param inputCollectionName
 * @param mapFunction
 * @param reduceFunction
 * @param mapReduceOptions
 * @param resultType
 * @return
 * @since 2.1
 * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
 */
@Deprecated
public <T> List<T> mapReduce(Query query, Class<?> domainType, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class<T> resultType) {
    Assert.notNull(domainType, "Domain type must not be null!");
    Assert.notNull(inputCollectionName, "Input collection name must not be null!");
    Assert.notNull(resultType, "Result type must not be null!");
    Assert.notNull(mapFunction, "Map function must not be null!");
    Assert.notNull(reduceFunction, "Reduce function must not be null!");
    String mapFunc = replaceWithResourceIfNecessary(mapFunction);
    String reduceFunc = replaceWithResourceIfNecessary(reduceFunction);
    MongoCollection<Document> inputCollection = getAndPrepareCollection(doGetDatabase(), inputCollectionName);
    // MapReduceOp
    MapReduceIterable<Document> mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class);
    if (query.getLimit() > 0 && mapReduceOptions != null && mapReduceOptions.getLimit() == null) {
        mapReduce = mapReduce.limit(query.getLimit());
    }
    if (query.getMeta().getMaxTimeMsec() != null) {
        mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS);
    }
    Document mappedSort = getMappedSortObject(query, domainType);
    if (mappedSort != null && !mappedSort.isEmpty()) {
        mapReduce = mapReduce.sort(mappedSort);
    }
    mapReduce = mapReduce.filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType)));
    Optional<Collation> collation = query.getCollation();
    if (mapReduceOptions != null) {
        Optionals.ifAllPresent(collation, mapReduceOptions.getCollation(), (l, r) -> {
            throw new IllegalArgumentException("Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two.");
        });
        if (mapReduceOptions.getCollation().isPresent()) {
            collation = mapReduceOptions.getCollation();
        }
        if (!CollectionUtils.isEmpty(mapReduceOptions.getScopeVariables())) {
            mapReduce = mapReduce.scope(new Document(mapReduceOptions.getScopeVariables()));
        }
        if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit() > 0) {
            mapReduce = mapReduce.limit(mapReduceOptions.getLimit());
        }
        if (mapReduceOptions.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) {
            mapReduce = mapReduce.finalizeFunction(mapReduceOptions.getFinalizeFunction().get());
        }
        if (mapReduceOptions.getJavaScriptMode() != null) {
            mapReduce = mapReduce.jsMode(mapReduceOptions.getJavaScriptMode());
        }
        if (mapReduceOptions.getOutputSharded().isPresent()) {
            mapReduce = mapReduce.sharded(mapReduceOptions.getOutputSharded().get());
        }
        if (StringUtils.hasText(mapReduceOptions.getOutputCollection()) && !mapReduceOptions.usesInlineOutput()) {
            mapReduce = mapReduce.collectionName(mapReduceOptions.getOutputCollection()).action(mapReduceOptions.getMapReduceAction());
            if (mapReduceOptions.getOutputDatabase().isPresent()) {
                mapReduce = mapReduce.databaseName(mapReduceOptions.getOutputDatabase().get());
            }
        }
    }
    if (!collation.isPresent()) {
        collation = operations.forType(domainType).getCollation();
    }
    mapReduce = collation.map(Collation::toMongoCollation).map(mapReduce::collation).orElse(mapReduce);
    List<T> mappedResults = new ArrayList<>();
    DocumentCallback<T> callback = new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName);
    for (Document document : mapReduce) {
        mappedResults.add(callback.doWith(document));
    }
    return mappedResults;
}
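The method accepts a collation from either the Query or the MapReduceOptions, and throws if both are set. A minimal sketch of a call, assuming an existing MongoTemplate named template, classpath map/reduce scripts, a "person" collection, and a hypothetical mapped Person domain class (all made up for illustration):

import java.util.List;
import org.bson.Document;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;

// "Person" is a hypothetical mapped domain class. The collation is supplied via the
// Query only; setting it on MapReduceOptions as well triggers the IllegalArgumentException above.
List<Document> mapReduceWithCollation(MongoTemplate template) {
    Query query = new Query().collation(Collation.of("en"));
    return template.mapReduce(query, Person.class, "person",
            "classpath:map.js", "classpath:reduce.js", MapReduceOptions.options(), Document.class);
}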
Use of org.springframework.data.mongodb.core.query.Collation in project spring-data-mongodb by spring-projects.
The class ReactiveMongoTemplate, method changeStream.
@Override
public <T> Flux<ChangeStreamEvent<T>> changeStream(@Nullable String database, @Nullable String collectionName, ChangeStreamOptions options, Class<T> targetType) {
    List<Document> filter = prepareFilter(options);
    FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT : FullDocument.UPDATE_LOOKUP;
    return ReactiveMongoDatabaseUtils.getDatabase(database, mongoDatabaseFactory).map(db -> {
        ChangeStreamPublisher<Document> publisher;
        if (StringUtils.hasText(collectionName)) {
            publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class)
                    : db.getCollection(collectionName).watch(filter, Document.class);
        } else {
            publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class);
        }
        publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher);
        publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher);
        publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher);
        return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument));
    }).flatMapMany(publisher -> Flux.from(publisher).map(document -> new ChangeStreamEvent<>(document, targetType, getConverter())));
}
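A hedged subscription sketch, assuming a configured ReactiveMongoTemplate named reactiveTemplate, a database "test", and a "person" collection (names made up for illustration):

import org.bson.Document;
import reactor.core.publisher.Flux;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Collation;

// The collation set on ChangeStreamOptions is mapped onto the ChangeStreamPublisher above.
Flux<ChangeStreamEvent<Document>> watchPersons(ReactiveMongoTemplate reactiveTemplate) {
    ChangeStreamOptions options = ChangeStreamOptions.builder()
            .collation(Collation.of("en"))
            .build();
    return reactiveTemplate.changeStream("test", "person", options, Document.class);
}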