Compare commits

..

11 Commits

Author SHA1 Message Date
Christoph Strobl
d94d273010 Fix test 2023-01-25 15:05:56 +01:00
Christoph Strobl
b9f6463337 decrypt? 2023-01-25 14:32:38 +01:00
Christoph Strobl
095022e71d reactive FLE encryption works -> next decrypt 2023-01-25 14:32:35 +01:00
Christoph Strobl
329b4b2881 Hacking - Reactive FLE
experiment with resolving reactive types in document
2023-01-25 14:24:35 +01:00
Christoph Strobl
73aeb7a425 Test encryption during update 2023-01-25 14:24:35 +01:00
Christoph Strobl
4b8ac4d249 Some changes that allow reading the alt key from a field
typically only supported in automatic schema but neat to have it here as well, e.g. for a customer-data cipher based on, e.g., the username.
Also make sure to translate decryption exceptions.
2023-01-25 14:24:35 +01:00
Christoph Strobl
1a7157fa7c Encrypt collection of complex types. 2023-01-25 14:24:35 +01:00
Christoph Strobl
10a089fe77 Encrypt collection of simple values 2023-01-25 14:24:35 +01:00
Christoph Strobl
7b93379165 Enable full encryption of nested documents. 2023-01-25 14:24:35 +01:00
Christoph Strobl
0361c3acc9 Hacking 2023-01-25 14:24:35 +01:00
Christoph Strobl
a6641e0c01 Prepare issue branch. 2023-01-25 14:24:34 +01:00
37 changed files with 1337 additions and 1423 deletions

View File

@@ -1,2 +1,2 @@
#Mon Jan 30 10:48:12 CET 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.7/apache-maven-3.8.7-bin.zip
#Fri Jun 03 09:32:40 CEST 2022
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.5/apache-maven-3.8.5-bin.zip

View File

@@ -1,16 +1,16 @@
# Java versions
java.main.tag=17.0.6_10-jdk-focal
java.main.tag=17.0.5_8-jdk-focal
# Docker container images - standard
docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
# Supported versions of MongoDB
docker.mongodb.4.4.version=4.4.18
docker.mongodb.5.0.version=5.0.14
docker.mongodb.6.0.version=6.0.4
docker.mongodb.4.4.version=4.4.17
docker.mongodb.5.0.version=5.0.13
docker.mongodb.6.0.version=6.0.2
# Supported versions of Redis
docker.redis.6.version=6.2.10
docker.redis.6.version=6.2.6
# Supported versions of Cassandra
docker.cassandra.3.version=3.11.14

12
pom.xml
View File

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-M1</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>3.1.0-M1</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<modules>
@@ -26,8 +26,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>3.1.0-M1</springdata.commons>
<mongo>4.9.0</mongo>
<springdata.commons>3.1.0-SNAPSHOT</springdata.commons>
<mongo>4.8.2</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>
@@ -145,8 +145,8 @@
<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-M1</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-M1</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-M1</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -112,6 +112,13 @@
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-crypt</artifactId>
<version>1.6.1</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>

View File

@@ -1,61 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.util.Assert;
import com.mongodb.client.MongoCollection;
/**
 * Functional contract for preparing a {@link MongoCollection} before it is used, e.g. to apply read settings.
 *
 * @author Mark Paluch
 * @since 4.1
 */
public interface CollectionPreparer<T> {

	/**
	 * Obtain a no-op preparer that hands back the given collection unchanged.
	 *
	 * @return a preparer that always returns its input collection.
	 */
	static <T> CollectionPreparer<T> identity() {
		return collection -> collection;
	}

	/**
	 * Prepare the given {@code collection} for use.
	 *
	 * @param collection the collection to prepare.
	 * @return the prepared collection.
	 */
	T prepare(T collection);

	/**
	 * Compose this preparer with {@code after}: the returned {@code CollectionPreparer} first applies this preparer and
	 * then hands the result to {@code after}. Exceptions thrown by either preparer propagate to the caller of the
	 * composed preparer.
	 *
	 * @param after the collection preparer to apply after this one; must not be {@literal null}.
	 * @return a composed {@code CollectionPreparer} applying this preparer first, then {@code after}.
	 */
	default CollectionPreparer<T> andThen(CollectionPreparer<T> after) {

		Assert.notNull(after, "After CollectionPreparer must not be null");

		return collection -> after.prepare(this.prepare(collection));
	}
}

View File

@@ -1,182 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.bson.Document;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoCollection;
/**
* Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon
* {@link CollectionPreparer preparing a collection}.
*
* @author Mark Paluch
* @since 4.1
*/
class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware {
private final List<Object> sources;
private CollectionPreparerSupport(List<Object> sources) {
this.sources = sources;
}
<T> T doPrepare(T collection, Function<T, ReadConcern> concernAccessor, BiFunction<T, ReadConcern, T> concernFunction,
Function<T, ReadPreference> preferenceAccessor, BiFunction<T, ReadPreference, T> preferenceFunction) {
T collectionToUse = collection;
for (Object source : sources) {
if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) {
ReadConcern concern = rca.getReadConcern();
if (concernAccessor.apply(collectionToUse) != concern) {
collectionToUse = concernFunction.apply(collectionToUse, concern);
}
break;
}
}
for (Object source : sources) {
if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
ReadPreference preference = rpa.getReadPreference();
if (preferenceAccessor.apply(collectionToUse) != preference) {
collectionToUse = preferenceFunction.apply(collectionToUse, preference);
}
break;
}
}
return collectionToUse;
}
@Override
public boolean hasReadConcern() {
for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return true;
}
}
return false;
}
@Override
public ReadConcern getReadConcern() {
for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return rca.getReadConcern();
}
}
return null;
}
@Override
public boolean hasReadPreference() {
for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return true;
}
}
return false;
}
@Override
public ReadPreference getReadPreference() {
for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return rpa.getReadPreference();
}
}
return null;
}
static class CollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<MongoCollection<Document>> {
private CollectionPreparerDelegate(List<Object> sources) {
super(sources);
}
public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}
public static CollectionPreparerDelegate of(Object... mixedAwares) {
if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (CollectionPreparerDelegate) mixedAwares[0];
}
return new CollectionPreparerDelegate(Arrays.asList(mixedAwares));
}
@Override
public MongoCollection<Document> prepare(MongoCollection<Document> collection) {
return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern,
MongoCollection::getReadPreference, MongoCollection::withReadPreference);
}
}
static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<com.mongodb.reactivestreams.client.MongoCollection<Document>> {
private ReactiveCollectionPreparerDelegate(List<Object> sources) {
super(sources);
}
public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}
public static ReactiveCollectionPreparerDelegate of(Object... mixedAwares) {
if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (ReactiveCollectionPreparerDelegate) mixedAwares[0];
}
return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares));
}
@Override
public com.mongodb.reactivestreams.client.MongoCollection<Document> prepare(
com.mongodb.reactivestreams.client.MongoCollection<Document> collection) {
return doPrepare(collection, //
com.mongodb.reactivestreams.client.MongoCollection::getReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::withReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::getReadPreference,
com.mongodb.reactivestreams.client.MongoCollection::withReadPreference);
}
}
}

View File

@@ -20,6 +20,7 @@ import java.util.Optional;
import java.util.stream.Stream;
import org.bson.Document;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
@@ -167,8 +168,7 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
Document queryObject = query.getQueryObject();
Document fieldsObject = query.getFieldsObject();
return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType,
returnType,
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
getCursorPreparer(query, preparer));
}

View File

@@ -68,6 +68,8 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
private static final Set<String> DATA_INTEGRITY_EXCEPTIONS = new HashSet<>(
Arrays.asList("WriteConcernException", "MongoWriteException", "MongoBulkWriteException"));
private static final Set<String> SECURITY_EXCEPTIONS = Set.of("MongoCryptException");
@Nullable
public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
@@ -131,6 +133,8 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
return new ClientSessionException(ex.getMessage(), ex);
} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
return new MongoTransactionException(ex.getMessage(), ex);
} else if(ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) {
return new PermissionDeniedDataAccessException(ex.getMessage(), ex);
}
return new UncategorizedMongoDbException(ex.getMessage(), ex);

View File

@@ -55,7 +55,6 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.CollectionPreparerSupport.CollectionPreparerDelegate;
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
@@ -67,7 +66,6 @@ import org.springframework.data.mongodb.core.QueryOperations.UpdateContext;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
@@ -85,6 +83,7 @@ import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Meta.CursorOption;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
@@ -113,23 +112,7 @@ import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
/**
* Primary implementation of {@link MongoOperations}. It simplifies the use of imperative MongoDB usage and helps to
* avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and
* extract results. This class executes BSON queries or updates, initiating iteration over {@link FindIterable} and
* catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the
* org.springframework.dao package. Can be used within a service implementation via direct instantiation with a
* {@link MongoDatabaseFactory} reference, or get prepared in an application context and given to services as bean
* reference.
* <p>
* Note: The {@link MongoDatabaseFactory} should always be configured as a bean in the application context, in the first
* case given to the service directly, in the second case to the prepared template.
* <h3>{@link ReadPreference} and {@link com.mongodb.ReadConcern}</h3>
* <p>
* {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and
* {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}.
* <p>
* You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to
* generally apply a {@link ReadPreference}.
* Primary implementation of {@link MongoOperations}.
*
* @author Thomas Risberg
* @author Graeme Rocher
@@ -158,8 +141,7 @@ import com.mongodb.client.result.UpdateResult;
* @author Bartłomiej Mazur
* @author Michael Krog
*/
public class MongoTemplate
implements MongoOperations, ApplicationContextAware, IndexOperationsProvider, ReadPreferenceAware {
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class);
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
@@ -311,16 +293,6 @@ public class MongoTemplate
this.readPreference = readPreference;
}
@Override
public boolean hasReadPreference() {
return this.readPreference != null;
}
@Override
public ReadPreference getReadPreference() {
return this.readPreference;
}
/**
* Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be
* published or whether emission should be suppressed. Enabled by default.
@@ -391,10 +363,10 @@ public class MongoTemplate
if (enabled) {
this.countExecution = (collectionPreparer, collectionName, filter, options) -> {
this.countExecution = (collectionName, filter, options) -> {
if (!estimationFilter.test(filter, options)) {
return doExactCount(collectionPreparer, collectionName, filter, options);
return doExactCount(collectionName, filter, options);
}
EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions();
@@ -402,7 +374,7 @@ public class MongoTemplate
estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS);
}
return doEstimatedCount(collectionPreparer, collectionName, estimatedDocumentCountOptions);
return doEstimatedCount(collectionName, estimatedDocumentCountOptions);
};
} else {
this.countExecution = this::doExactCount;
@@ -471,9 +443,8 @@ public class MongoTemplate
Document mappedQuery = queryContext.getMappedQuery(persistentEntity);
Document mappedFields = queryContext.getMappedFields(persistentEntity, projection);
CollectionPreparerDelegate readPreference = createDelegate(query);
FindIterable<Document> cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection,
col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields));
col -> col.find(mappedQuery, Document.class).projection(mappedFields));
return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator,
new ProjectingReadCallback<>(mongoConverter, projection, collectionName)).stream();
@@ -546,7 +517,7 @@ public class MongoTemplate
serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
}
this.executeQueryInternal(new FindCallback(createDelegate(query), queryObject, fieldsObject, null),
this.executeQueryInternal(new FindCallback(queryObject, fieldsObject, null),
preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, documentCallbackHandler, collectionName);
}
@@ -794,7 +765,7 @@ public class MongoTemplate
if (ObjectUtils.isEmpty(query.getSortObject())) {
return doFindOne(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(),
return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(),
new QueryCursorPreparer(query, entityClass), entityClass);
} else {
query.limit(1);
@@ -826,7 +797,7 @@ public class MongoTemplate
Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity);
return execute(collectionName,
new ExistsCallback(createDelegate(query), mappedQuery, queryContext.getCollation(entityClass).orElse(null)));
new ExistsCallback(mappedQuery, queryContext.getCollation(entityClass).orElse(null)));
}
// Find methods that take a Query to express the query and that return a List of objects.
@@ -843,7 +814,7 @@ public class MongoTemplate
Assert.notNull(collectionName, "CollectionName must not be null");
Assert.notNull(entityClass, "EntityClass must not be null");
return doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass,
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
new QueryCursorPreparer(query, entityClass));
}
@@ -863,8 +834,7 @@ public class MongoTemplate
String idKey = operations.getIdPropertyName(entityClass);
return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), new Document(),
entityClass);
return doFindOne(collectionName, new Document(idKey, id), new Document(), entityClass);
}
@Override
@@ -897,7 +867,10 @@ public class MongoTemplate
serializeToJsonSafely(mappedQuery), field, collectionName));
}
collection = createDelegate(query).prepare(collection);
QueryCursorPreparer preparer = new QueryCursorPreparer(query, entityClass);
if (preparer.hasReadPreference()) {
collection = collection.withReadPreference(preparer.getReadPreference());
}
DistinctIterable<T> iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType);
distinctQueryContext.applyCollation(entityClass, iterable::collation);
@@ -947,18 +920,8 @@ public class MongoTemplate
String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(domainType);
String distanceField = operations.nearQueryDistanceFieldName(domainType);
Builder optionsBuilder = AggregationOptions.builder().collation(near.getCollation());
if (near.hasReadPreference()) {
optionsBuilder.readPreference(near.getReadPreference());
}
if(near.hasReadConcern()) {
optionsBuilder.readConcern(near.getReadConcern());
}
Aggregation $geoNear = TypedAggregation.newAggregation(domainType, Aggregation.geoNear(near, distanceField))
.withOptions(optionsBuilder.build());
.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
AggregationResults<Document> results = aggregate($geoNear, collection, Document.class);
EntityProjection<T, ?> projection = operations.introspectProjection(returnType, domainType);
@@ -1023,7 +986,7 @@ public class MongoTemplate
operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation);
}
return doFindAndModify(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(),
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(),
getMappedSortObject(query, entityClass), entityClass, update, optionsToUse);
}
@@ -1045,7 +1008,6 @@ public class MongoTemplate
QueryContext queryContext = queryOperations.createQueryContext(query);
EntityProjection<T, S> projection = operations.introspectProjection(resultType, entityType);
CollectionPreparerDelegate collectionPreparer = createDelegate(query);
Document mappedQuery = queryContext.getMappedQuery(entity);
Document mappedFields = queryContext.getMappedFields(entity, projection);
Document mappedSort = queryContext.getMappedSort(entity);
@@ -1056,7 +1018,7 @@ public class MongoTemplate
maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName));
maybeCallBeforeSave(replacement, mappedReplacement, collectionName);
T saved = doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort,
T saved = doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, projection);
if (saved != null) {
@@ -1084,7 +1046,7 @@ public class MongoTemplate
Assert.notNull(entityClass, "EntityClass must not be null");
Assert.notNull(collectionName, "CollectionName must not be null");
return doFindAndRemove(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(),
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(),
getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null),
entityClass);
}
@@ -1116,19 +1078,17 @@ public class MongoTemplate
CountOptions options = countContext.getCountOptions(entityClass);
Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity);
CollectionPreparerDelegate readPreference = createDelegate(query);
return doCount(readPreference, collectionName, mappedQuery, options);
return doCount(collectionName, mappedQuery, options);
}
protected long doCount(CollectionPreparer collectionPreparer, String collectionName, Document filter,
CountOptions options) {
protected long doCount(String collectionName, Document filter, CountOptions options) {
if (LOGGER.isDebugEnabled()) {
LOGGER
.debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName));
}
return countExecution.countDocuments(collectionPreparer, collectionName, filter, options);
return countExecution.countDocuments(collectionName, filter, options);
}
/*
@@ -1137,13 +1097,11 @@ public class MongoTemplate
*/
@Override
public long estimatedCount(String collectionName) {
return doEstimatedCount(CollectionPreparerDelegate.of(this), collectionName, new EstimatedDocumentCountOptions());
return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions());
}
protected long doEstimatedCount(CollectionPreparer<MongoCollection<Document>> collectionPreparer,
String collectionName, EstimatedDocumentCountOptions options) {
return execute(collectionName,
collection -> collectionPreparer.prepare(collection).estimatedDocumentCount(options));
protected long doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) {
return execute(collectionName, collection -> collection.estimatedDocumentCount(options));
}
@Override
@@ -1154,13 +1112,12 @@ public class MongoTemplate
CountOptions options = countContext.getCountOptions(entityClass);
Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity);
return doExactCount(createDelegate(query), collectionName, mappedQuery, options);
return doExactCount(collectionName, mappedQuery, options);
}
protected long doExactCount(CollectionPreparer<MongoCollection<Document>> collectionPreparer, String collectionName,
Document filter, CountOptions options) {
return execute(collectionName, collection -> collectionPreparer.prepare(collection)
.countDocuments(CountQuery.of(filter).toQueryDocument(), options));
protected long doExactCount(String collectionName, Document filter, CountOptions options) {
return execute(collectionName,
collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options));
}
protected boolean countCanBeEstimated(Document filter, CountOptions options) {
@@ -1220,8 +1177,8 @@ public class MongoTemplate
*/
protected MongoCollection<Document> prepareCollection(MongoCollection<Document> collection) {
if (this.readPreference != null && this.readPreference != collection.getReadPreference()) {
return collection.withReadPreference(readPreference);
if (this.readPreference != null) {
collection = collection.withReadPreference(readPreference);
}
return collection;
@@ -1797,7 +1754,7 @@ public class MongoTemplate
@Override
public <T> List<T> findAll(Class<T> entityClass, String collectionName) {
return executeFindMultiInternal(
new FindCallback(CollectionPreparer.identity(), new Document(), new Document(),
new FindCallback(new Document(), new Document(),
operations.forType(entityClass).getCollation().map(Collation::toMongoCollation).orElse(null)),
CursorPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName),
collectionName);
@@ -1855,9 +1812,7 @@ public class MongoTemplate
String mapFunc = replaceWithResourceIfNecessary(mapFunction);
String reduceFunc = replaceWithResourceIfNecessary(reduceFunction);
CollectionPreparerDelegate readPreference = createDelegate(query);
MongoCollection<Document> inputCollection = readPreference
.prepare(getAndPrepareCollection(doGetDatabase(), inputCollectionName));
MongoCollection<Document> inputCollection = getAndPrepareCollection(doGetDatabase(), inputCollectionName);
// MapReduceOp
MapReduceIterable<Document> mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class);
@@ -2022,9 +1977,6 @@ public class MongoTemplate
if (!CollectionUtils.isEmpty(result)) {
Query byIdInQuery = operations.getByIdInQuery(result);
if (query.hasReadPreference()) {
byIdInQuery.withReadPreference(query.getReadPreference());
}
remove(byIdInQuery, entityClass, collectionName);
}
@@ -2080,7 +2032,7 @@ public class MongoTemplate
return execute(collectionName, collection -> {
List<Document> rawResult = new ArrayList<>();
CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options);
Class<?> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation<?>) aggregation).getInputType()
: null;
@@ -2088,7 +2040,7 @@ public class MongoTemplate
() -> operations.forType(domainType) //
.getCollation());
AggregateIterable<Document> aggregateIterable = delegate.prepare(collection).aggregate(pipeline, Document.class) //
AggregateIterable<Document> aggregateIterable = collection.aggregate(pipeline, Document.class) //
.collation(collation.map(Collation::toMongoCollation).orElse(null)) //
.allowDiskUse(options.isAllowDiskUse());
@@ -2151,9 +2103,7 @@ public class MongoTemplate
return execute(collectionName, (CollectionCallback<Stream<O>>) collection -> {
CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options);
AggregateIterable<Document> cursor = delegate.prepare(collection).aggregate(pipeline, Document.class) //
AggregateIterable<Document> cursor = collection.aggregate(pipeline, Document.class) //
.allowDiskUse(options.isAllowDiskUse());
if (options.getCursorBatchSize() != null) {
@@ -2394,16 +2344,14 @@ public class MongoTemplate
* The query document is specified as a standard {@link Document} and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
* @return the converted object or {@literal null} if none exists.
*/
@Nullable
protected <T> T doFindOne(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<T> entityClass) {
return doFindOne(collectionName, collectionPreparer, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass);
protected <T> T doFindOne(String collectionName, Document query, Document fields, Class<T> entityClass) {
return doFindOne(collectionName, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass);
}
/**
@@ -2411,18 +2359,17 @@ public class MongoTemplate
* The query document is specified as a standard {@link Document} and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param preparer the preparer used to modify the cursor on execution.
* @param entityClass the parameterized type of the returned list.
* @param preparer the preparer used to modify the cursor on execution.
* @return the converted object or {@literal null} if none exists.
* @since 2.2
*/
@Nullable
@SuppressWarnings("ConstantConditions")
protected <T> T doFindOne(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, CursorPreparer preparer, Class<T> entityClass) {
protected <T> T doFindOne(String collectionName, Document query, Document fields, CursorPreparer preparer,
Class<T> entityClass) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
@@ -2435,7 +2382,7 @@ public class MongoTemplate
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
}
return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer),
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, preparer),
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName);
}
@@ -2444,15 +2391,13 @@ public class MongoTemplate
* query document is specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record
* @param fields the document that specifies the fields to be returned
* @param entityClass the parameterized type of the returned list.
* @return the List of converted objects.
*/
protected <T> List<T> doFind(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<T> entityClass) {
return doFind(collectionName, collectionPreparer, query, fields, entityClass, null,
protected <T> List<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass) {
return doFind(collectionName, query, fields, entityClass, null,
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName));
}
@@ -2462,7 +2407,6 @@ public class MongoTemplate
* specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
@@ -2470,15 +2414,14 @@ public class MongoTemplate
* (apply limits, skips and so on).
* @return the {@link List} of converted objects.
*/
protected <T> List<T> doFind(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<T> entityClass, CursorPreparer preparer) {
return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer,
protected <T> List<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass,
CursorPreparer preparer) {
return doFind(collectionName, query, fields, entityClass, preparer,
new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName));
}
protected <S, T> List<T> doFind(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields,
Class<S> entityClass, @Nullable CursorPreparer preparer, DocumentCallback<T> objectCallback) {
protected <S, T> List<T> doFind(String collectionName, Document query, Document fields, Class<S> entityClass,
@Nullable CursorPreparer preparer, DocumentCallback<T> objectCallback) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
@@ -2491,7 +2434,7 @@ public class MongoTemplate
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
}
return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null),
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null),
preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, objectCallback, collectionName);
}
@@ -2501,8 +2444,8 @@ public class MongoTemplate
*
* @since 2.0
*/
<S, T> List<T> doFind(CollectionPreparer<MongoCollection<Document>> collectionPreparer, String collectionName,
Document query, Document fields, Class<S> sourceClass, Class<T> targetClass, CursorPreparer preparer) {
<S, T> List<T> doFind(String collectionName, Document query, Document fields, Class<S> sourceClass,
Class<T> targetClass, CursorPreparer preparer) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
EntityProjection<T, S> projection = operations.introspectProjection(targetClass, sourceClass);
@@ -2516,7 +2459,7 @@ public class MongoTemplate
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName));
}
return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer,
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null), preparer,
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
}
@@ -2590,8 +2533,8 @@ public class MongoTemplate
* @return the List of converted objects.
*/
@SuppressWarnings("ConstantConditions")
protected <T> T doFindAndRemove(CollectionPreparer collectionPreparer, String collectionName, Document query,
Document fields, Document sort, @Nullable Collation collation, Class<T> entityClass) {
protected <T> T doFindAndRemove(String collectionName, Document query, Document fields, Document sort,
@Nullable Collation collation, Class<T> entityClass) {
EntityReader<? super T, Bson> readerToUse = this.mongoConverter;
@@ -2602,15 +2545,14 @@ public class MongoTemplate
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer,
queryMapper.getMappedObject(query, entity), fields, sort, collation),
return executeFindOneInternal(
new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation),
new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName);
}
@SuppressWarnings("ConstantConditions")
protected <T> T doFindAndModify(CollectionPreparer collectionPreparer, String collectionName, Document query,
Document fields, Document sort, Class<T> entityClass, UpdateDefinition update,
@Nullable FindAndModifyOptions options) {
protected <T> T doFindAndModify(String collectionName, Document query, Document fields, Document sort,
Class<T> entityClass, UpdateDefinition update, @Nullable FindAndModifyOptions options) {
EntityReader<? super T, Bson> readerToUse = this.mongoConverter;
@@ -2635,7 +2577,7 @@ public class MongoTemplate
}
return executeFindOneInternal(
new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate,
new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate,
update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options),
new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName);
}
@@ -2656,18 +2598,14 @@ public class MongoTemplate
* {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}.
*/
@Nullable
protected <T> T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery,
Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation,
Class<?> entityType, Document replacement, FindAndReplaceOptions options, Class<T> resultType) {
protected <T> T doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class<?> entityType,
Document replacement, FindAndReplaceOptions options, Class<T> resultType) {
EntityProjection<T, ?> projection = operations.introspectProjection(resultType, entityType);
return doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, collation,
entityType, replacement, options, projection);
}
CollectionPreparerDelegate createDelegate(Query query) {
return CollectionPreparerDelegate.of(query);
return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, collation, entityType, replacement,
options, projection);
}
/**
@@ -2687,9 +2625,9 @@ public class MongoTemplate
* @since 3.4
*/
@Nullable
private <T> T doFindAndReplace(CollectionPreparer collectionPreparer, String collectionName, Document mappedQuery,
Document mappedFields, Document mappedSort, @Nullable com.mongodb.client.model.Collation collation,
Class<?> entityType, Document replacement, FindAndReplaceOptions options, EntityProjection<T, ?> projection) {
private <T> T doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class<?> entityType,
Document replacement, FindAndReplaceOptions options, EntityProjection<T, ?> projection) {
if (LOGGER.isDebugEnabled()) {
LOGGER
@@ -2700,9 +2638,9 @@ public class MongoTemplate
serializeToJsonSafely(mappedSort), entityType, serializeToJsonSafely(replacement), collectionName));
}
return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, mappedSort,
replacement, collation, options), new ProjectingReadCallback<>(mongoConverter, projection, collectionName),
collectionName);
return executeFindOneInternal(
new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options),
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
}
/**
@@ -2872,15 +2810,12 @@ public class MongoTemplate
*/
private static class FindOneCallback implements CollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Optional<Document> fields;
private final CursorPreparer cursorPreparer;
FindOneCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields,
CursorPreparer preparer) {
FindOneCallback(Document query, Document fields, CursorPreparer preparer) {
this.collectionPreparer = collectionPreparer;
this.query = query;
this.fields = Optional.of(fields).filter(it -> !ObjectUtils.isEmpty(fields));
this.cursorPreparer = preparer;
@@ -2889,8 +2824,7 @@ public class MongoTemplate
@Override
public Document doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
FindIterable<Document> iterable = cursorPreparer.initiateFind(collection,
col -> collectionPreparer.prepare(col).find(query, Document.class));
FindIterable<Document> iterable = cursorPreparer.initiateFind(collection, col -> col.find(query, Document.class));
if (LOGGER.isDebugEnabled()) {
@@ -2917,18 +2851,15 @@ public class MongoTemplate
*/
private static class FindCallback implements CollectionCallback<FindIterable<Document>> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final @Nullable com.mongodb.client.model.Collation collation;
public FindCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, @Nullable com.mongodb.client.model.Collation collation) {
public FindCallback(Document query, Document fields, @Nullable com.mongodb.client.model.Collation collation) {
Assert.notNull(query, "Query must not be null");
Assert.notNull(fields, "Fields must not be null");
this.collectionPreparer = collectionPreparer;
this.query = query;
this.fields = fields;
this.collation = collation;
@@ -2938,8 +2869,7 @@ public class MongoTemplate
public FindIterable<Document> doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
FindIterable<Document> findIterable = collectionPreparer.prepare(collection).find(query, Document.class)
.projection(fields);
FindIterable<Document> findIterable = collection.find(query, Document.class).projection(fields);
if (collation != null) {
findIterable = findIterable.collation(collation);
@@ -2957,14 +2887,11 @@ public class MongoTemplate
*/
private class ExistsCallback implements CollectionCallback<Boolean> {
private final CollectionPreparer collectionPreparer;
private final Document mappedQuery;
private final com.mongodb.client.model.Collation collation;
ExistsCallback(CollectionPreparer collectionPreparer, Document mappedQuery,
com.mongodb.client.model.Collation collation) {
ExistsCallback(Document mappedQuery, com.mongodb.client.model.Collation collation) {
this.collectionPreparer = collectionPreparer;
this.mappedQuery = mappedQuery;
this.collation = collation;
}
@@ -2972,7 +2899,7 @@ public class MongoTemplate
@Override
public Boolean doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
return doCount(collectionPreparer, collection.getNamespace().getCollectionName(), mappedQuery,
return doCount(collection.getNamespace().getCollectionName(), mappedQuery,
new CountOptions().limit(1).collation(collation)) > 0;
}
}
@@ -2985,15 +2912,12 @@ public class MongoTemplate
*/
private static class FindAndRemoveCallback implements CollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
private final Optional<Collation> collation;
FindAndRemoveCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, @Nullable Collation collation) {
this.collectionPreparer = collectionPreparer;
FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
this.query = query;
this.fields = fields;
@@ -3007,13 +2931,12 @@ public class MongoTemplate
FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields);
collation.map(Collation::toMongoCollation).ifPresent(opts::collation);
return collectionPreparer.prepare(collection).findOneAndDelete(query, opts);
return collection.findOneAndDelete(query, opts);
}
}
private static class FindAndModifyCallback implements CollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
@@ -3021,10 +2944,9 @@ public class MongoTemplate
private final List<Document> arrayFilters;
private final FindAndModifyOptions options;
FindAndModifyCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, Object update, List<Document> arrayFilters, FindAndModifyOptions options) {
FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List<Document> arrayFilters,
FindAndModifyOptions options) {
this.collectionPreparer = collectionPreparer;
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -3053,9 +2975,9 @@ public class MongoTemplate
}
if (update instanceof Document) {
return collectionPreparer.prepare(collection).findOneAndUpdate(query, (Document) update, opts);
return collection.findOneAndUpdate(query, (Document) update, opts);
} else if (update instanceof List) {
return collectionPreparer.prepare(collection).findOneAndUpdate(query, (List<Document>) update, opts);
return collection.findOneAndUpdate(query, (List<Document>) update, opts);
}
throw new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update));
@@ -3071,7 +2993,6 @@ public class MongoTemplate
*/
private static class FindAndReplaceCallback implements CollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
@@ -3079,10 +3000,9 @@ public class MongoTemplate
private final @Nullable com.mongodb.client.model.Collation collation;
private final FindAndReplaceOptions options;
FindAndReplaceCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, Document update, @Nullable com.mongodb.client.model.Collation collation,
FindAndReplaceOptions options) {
this.collectionPreparer = collectionPreparer;
FindAndReplaceCallback(Document query, Document fields, Document sort, Document update,
@Nullable com.mongodb.client.model.Collation collation, FindAndReplaceOptions options) {
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -3107,7 +3027,7 @@ public class MongoTemplate
opts.returnDocument(ReturnDocument.AFTER);
}
return collectionPreparer.prepare(collection).findOneAndReplace(query, update, opts);
return collection.findOneAndReplace(query, update, opts);
}
}
@@ -3289,6 +3209,11 @@ public class MongoTemplate
return cursorToUse;
}
@Override
public ReadPreference getReadPreference() {
return query.getMeta().getFlags().contains(CursorOption.SECONDARY_READS) ? ReadPreference.primaryPreferred()
: null;
}
}
/**
@@ -3474,7 +3399,6 @@ public class MongoTemplate
@FunctionalInterface
interface CountExecution {
long countDocuments(CollectionPreparer collectionPreparer, String collection, Document filter,
CountOptions options);
long countDocuments(String collection, Document filter, CountOptions options);
}
}

View File

@@ -20,7 +20,6 @@ import reactor.core.publisher.Mono;
import org.bson.Document;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.SerializationUtils;
@@ -68,8 +67,8 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
private final String collection;
private final Query query;
ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType, String collection,
Query query) {
ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
String collection, Query query) {
this.template = template;
this.domainType = domainType;
@@ -170,8 +169,8 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
Document queryObject = query.getQueryObject();
Document fieldsObject = query.getFieldsObject();
return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject,
fieldsObject, domainType, returnType, preparer != null ? preparer : getCursorPreparer(query));
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
preparer != null ? preparer : getCursorPreparer(query));
}
@SuppressWarnings("unchecked")

View File

@@ -30,6 +30,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
@@ -70,7 +71,6 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate;
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
@@ -81,7 +81,6 @@ import org.springframework.data.mongodb.core.QueryOperations.UpdateContext;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
@@ -107,6 +106,7 @@ import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Meta.CursorOption;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
@@ -148,18 +148,9 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
* extract results. This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and
* catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the
* org.springframework.dao package. Can be used within a service implementation via direct instantiation with a
* {@link ReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as
* bean reference.
* <p>
* Note: The {@link ReactiveMongoDatabaseFactory} should always be configured as a bean in the application context, in
* the first case given to the service directly, in the second case to the prepared template.
* <h3>{@link ReadPreference} and {@link com.mongodb.ReadConcern}</h3>
* <p>
* {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and
* {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}.
* <p>
* You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to
* generally apply a {@link ReadPreference}.
* {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services
* as bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the
* application context, in the first case given to the service directly, in the second case to the prepared template.
*
* @author Mark Paluch
* @author Christoph Strobl
@@ -766,8 +757,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
public <T> Mono<T> findOne(Query query, Class<T> entityClass, String collectionName) {
if (ObjectUtils.isEmpty(query.getSortObject())) {
return doFindOne(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(),
query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass));
return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
new QueryFindPublisherPreparer(query, entityClass));
}
query.limit(1);
@@ -793,11 +784,10 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return createFlux(collectionName, collection -> {
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query);
QueryContext queryContext = queryOperations.createQueryContext(query);
Document filter = queryContext.getMappedQuery(entityClass, this::getPersistentEntity);
FindPublisher<Document> findPublisher = collectionPreparer.prepare(collection).find(filter, Document.class)
FindPublisher<Document> findPublisher = collection.find(filter, Document.class)
.projection(new Document("_id", 1));
if (LOGGER.isDebugEnabled()) {
@@ -822,8 +812,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return findAll(entityClass, collectionName);
}
return doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(),
query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass));
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
new QueryFindPublisherPreparer(query, entityClass));
}
@Override
@@ -836,8 +826,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
String idKey = operations.getIdPropertyName(entityClass);
return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), null, entityClass,
(Collation) null);
return doFindOne(collectionName, new Document(idKey, id), null, entityClass, (Collation) null);
}
@Override
@@ -862,7 +851,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Document mappedQuery = distinctQueryContext.getMappedQuery(entity);
String mappedFieldName = distinctQueryContext.getMappedFieldName(entity);
Class<T> mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass);
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query);
Flux<?> result = execute(collectionName, collection -> {
@@ -872,9 +860,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}
FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass);
if (preparer.hasReadPreference()) {
collection = collection.withReadPreference(preparer.getReadPreference());
}
DistinctPublisher<T> publisher = collectionPreparer.prepare(collection).distinct(mappedFieldName, mappedQuery,
mongoDriverCompatibleType);
DistinctPublisher<T> publisher = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType);
distinctQueryContext.applyCollation(entityClass, publisher::collation);
return publisher;
});
@@ -940,8 +930,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback<O> readCallback,
@Nullable Class<?> inputType) {
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options);
AggregatePublisher<Document> cursor = collectionPreparer.prepare(collection).aggregate(pipeline, Document.class)
AggregatePublisher<Document> cursor = collection.aggregate(pipeline, Document.class)
.allowDiskUse(options.isAllowDiskUse());
if (options.getCursorBatchSize() != null) {
@@ -999,19 +988,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
GeoNearResultDocumentCallback<T> callback = new GeoNearResultDocumentCallback<>(distanceField,
new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric());
Builder optionsBuilder = AggregationOptions.builder();
if (near.hasReadPreference()) {
optionsBuilder.readPreference(near.getReadPreference());
}
if(near.hasReadConcern()) {
optionsBuilder.readConcern(near.getReadConcern());
}
optionsBuilder.collation(near.getCollation());
Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField))
.withOptions(optionsBuilder.build());
.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
return aggregate($geoNear, collection, Document.class) //
.concatMap(callback::doWith);
@@ -1051,8 +1029,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation);
}
return doFindAndModify(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(),
query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse);
return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(),
getMappedSortObject(query, entityClass), entityClass, update, optionsToUse);
}
@Override
@@ -1076,7 +1054,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Document mappedQuery = queryContext.getMappedQuery(entity);
Document mappedFields = queryContext.getMappedFields(entity, projection);
Document mappedSort = queryContext.getMappedSort(entity);
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query);
return Mono.defer(() -> {
@@ -1094,9 +1071,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
mapped.getCollection()));
}).flatMap(it -> {
Mono<T> afterFindAndReplace = doFindAndReplace(it.getCollection(), collectionPreparer, mappedQuery,
mappedFields, mappedSort, queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(),
options, projection);
Mono<T> afterFindAndReplace = doFindAndReplace(it.getCollection(), mappedQuery, mappedFields, mappedSort,
queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), options, projection);
return afterFindAndReplace.flatMap(saved -> {
maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection()));
return maybeCallAfterSave(saved, it.getTarget(), it.getCollection());
@@ -1114,9 +1090,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
public <T> Mono<T> findAndRemove(Query query, Class<T> entityClass, String collectionName) {
operations.forType(entityClass).getCollation(query);
return doFindAndRemove(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(),
query.getFieldsObject(), getMappedSortObject(query, entityClass),
operations.forType(entityClass).getCollation(query).orElse(null), entityClass);
return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(),
getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null),
entityClass);
}
/*
@@ -1456,7 +1432,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
Document dbDoc = entity.toMappedDocument(writer).getDocument();
maybeEmitEvent(new BeforeSaveEvent<T>(toConvert, dbDoc, collectionName));
return maybeCallBeforeSave(toConvert, dbDoc, collectionName).flatMap(it -> {
return maybeCallBeforeSave(toConvert, dbDoc, collectionName)
.flatMap(it -> {
return saveDocument(collectionName, dbDoc, it.getClass()).flatMap(id -> {
@@ -1469,6 +1446,26 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
});
}
private Mono<Document> resolveValues(Mono<Document> document) {
return document.flatMap(source -> {
for (Entry<String, Object> entry : source.entrySet()) {
if (entry.getValue()instanceof Mono<?> valueMono) {
return valueMono.flatMap(value -> {
source.put(entry.getKey(), value);
return resolveValues(Mono.just(source));
});
}
if (entry.getValue()instanceof Document nested) {
return resolveValues(Mono.just(nested)).map(it -> {
source.put(entry.getKey(), it);
return source;
});
}
}
return Mono.just(source);
});
}
protected Mono<Object> insertDocument(String collectionName, Document dbDoc, Class<?> entityClass) {
if (LOGGER.isDebugEnabled()) {
@@ -1552,16 +1549,16 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
? collection //
: collection.withWriteConcern(writeConcernToUse);
Publisher<?> publisher;
Publisher<?> publisher = null;
Mono<Document> resolved = resolveValues(Mono.just(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument()));
if (!mapped.hasId()) {
publisher = collectionToUse
.insertOne(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument());
publisher = resolved.flatMap(it -> Mono.from(collectionToUse.insertOne(it)));
} else {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true);
Document filter = updateContext.getMappedQuery(entity);
Document replacement = updateContext.getMappedUpdate(entity);
Mono<Document> replacement = resolveValues(Mono.just(updateContext.getMappedUpdate(entity)));
Mono<Document> deferredFilter;
@@ -1572,14 +1569,17 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
deferredFilter = Mono
.from(
collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first())
.defaultIfEmpty(replacement).map(it -> updateContext.applyShardKey(entity, filter, it));
.switchIfEmpty(replacement)
.map(it -> {
return updateContext.applyShardKey(entity, filter, it);
});
}
} else {
deferredFilter = Mono.just(filter);
}
publisher = deferredFilter.flatMapMany(
it -> collectionToUse.replaceOne(it, replacement, updateContext.getReplaceOptions(entityClass)));
publisher = deferredFilter.zipWith(replacement).flatMapMany(
it -> collectionToUse.replaceOne(it.getT1(), it.getT2(), updateContext.getReplaceOptions(entityClass)));
}
return Mono.from(publisher).map(o -> mapped.getId());
@@ -1824,14 +1824,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass,
null, removeQuery);
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query);
return execute(collectionName, collection -> {
maybeEmitEvent(new BeforeDeleteEvent<>(removeQuery, entityClass, collectionName));
MongoCollection<Document> collectionToUse = collectionPreparer
.prepare(prepareCollection(collection, writeConcernToUse));
MongoCollection<Document> collectionToUse = prepareCollection(collection, writeConcernToUse);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery),
@@ -1866,9 +1864,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
@Override
public <T> Flux<T> findAll(Class<T> entityClass, String collectionName) {
return executeFindMultiInternal(new FindCallback(CollectionPreparer.identity(), null),
FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName),
collectionName);
return executeFindMultiInternal(new FindCallback(null), FindPublisherPreparer.NO_OP_PREPARER,
new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName);
}
@Override
@@ -1895,19 +1892,17 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
@Override
public <T> Flux<T> tail(@Nullable Query query, Class<T> entityClass, String collectionName) {
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query);
if (query == null) {
LOGGER.debug(String.format("Tail for class: %s in collection: %s", entityClass, collectionName));
return executeFindMultiInternal(
collection -> new FindCallback(collectionPreparer, null).doInCollection(collection)
.cursorType(CursorType.TailableAwait),
collection -> new FindCallback(null).doInCollection(collection).cursorType(CursorType.TailableAwait),
FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName),
collectionName);
}
return doFind(collectionName, collectionPreparer, query.getQueryObject(), query.getFieldsObject(), entityClass,
return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass,
new TailingQueryFindPublisherPreparer(query, entityClass));
}
@@ -1991,14 +1986,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
assertLocalFunctionNames(mapFunction, reduceFunction);
ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(filterQuery);
return createFlux(inputCollectionName, collection -> {
Document mappedQuery = queryMapper.getMappedObject(filterQuery.getQueryObject(),
mappingContext.getPersistentEntity(domainType));
MapReducePublisher<Document> publisher = collectionPreparer.prepare(collection).mapReduce(mapFunction,
reduceFunction, Document.class);
MapReducePublisher<Document> publisher = collection.mapReduce(mapFunction, reduceFunction, Document.class);
publisher.filter(mappedQuery);
@@ -2165,18 +2158,16 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* The query document is specified as a standard {@link Document} and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
* @param collation can be {@literal null}.
* @return the {@link List} of converted objects.
*/
protected <T> Mono<T> doFindOne(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, @Nullable Document fields,
protected <T> Mono<T> doFindOne(String collectionName, Document query, @Nullable Document fields,
Class<T> entityClass, @Nullable Collation collation) {
return doFindOne(collectionName, collectionPreparer, query, fields, entityClass,
return doFindOne(collectionName, query, fields, entityClass,
findPublisher -> collation != null ? findPublisher.collation(collation.toMongoCollation()) : findPublisher);
}
@@ -2185,7 +2176,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* The query document is specified as a standard {@link Document} and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
@@ -2193,8 +2183,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @return the {@link List} of converted objects.
* @since 2.2
*/
protected <T> Mono<T> doFindOne(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, @Nullable Document fields,
protected <T> Mono<T> doFindOne(String collectionName, Document query, @Nullable Document fields,
Class<T> entityClass, FindPublisherPreparer preparer) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
@@ -2209,7 +2198,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
}
return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer),
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, preparer),
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName);
}
@@ -2218,15 +2207,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* query document is specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record
* @param fields the document that specifies the fields to be returned
* @param entityClass the parameterized type of the returned list.
* @return the List of converted objects.
*/
protected <T> Flux<T> doFind(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<T> entityClass) {
return doFind(collectionName, collectionPreparer, query, fields, entityClass, null,
protected <T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass) {
return doFind(collectionName, query, fields, entityClass, null,
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName));
}
@@ -2236,7 +2223,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
@@ -2244,15 +2230,14 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* the result set, (apply limits, skips and so on).
* @return the {@link List} of converted objects.
*/
protected <T> Flux<T> doFind(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<T> entityClass, FindPublisherPreparer preparer) {
return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer,
protected <T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass,
FindPublisherPreparer preparer) {
return doFind(collectionName, query, fields, entityClass, preparer,
new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName));
}
protected <S, T> Flux<T> doFind(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields,
Class<S> entityClass, @Nullable FindPublisherPreparer preparer, DocumentCallback<T> objectCallback) {
protected <S, T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<S> entityClass,
@Nullable FindPublisherPreparer preparer, DocumentCallback<T> objectCallback) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
@@ -2265,8 +2250,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
}
return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer,
objectCallback, collectionName);
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback,
collectionName);
}
/**
@@ -2275,8 +2260,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*
* @since 2.0
*/
<S, T> Flux<T> doFind(String collectionName, CollectionPreparer<MongoCollection<Document>> collectionPreparer,
Document query, Document fields, Class<S> sourceClass, Class<T> targetClass, FindPublisherPreparer preparer) {
<S, T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<S> sourceClass,
Class<T> targetClass, FindPublisherPreparer preparer) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
EntityProjection<T, S> projection = operations.introspectProjection(targetClass, sourceClass);
@@ -2290,7 +2275,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName));
}
return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer,
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer,
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
}
@@ -2308,15 +2293,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* The first document that matches the query is returned and also removed from the collection in the database. <br />
* The query document is specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param query the query document that specifies the criteria used to find a record.
* @param collation collation.
* @param collectionName name of the collection to retrieve the objects from
* @param query the query document that specifies the criteria used to find a record
* @param collation collation
* @param entityClass the parameterized type of the returned list.
* @return the List of converted objects.
*/
protected <T> Mono<T> doFindAndRemove(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields, Document sort,
protected <T> Mono<T> doFindAndRemove(String collectionName, Document query, Document fields, Document sort,
@Nullable Collation collation, Class<T> entityClass) {
if (LOGGER.isDebugEnabled()) {
@@ -2326,13 +2309,12 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer,
queryMapper.getMappedObject(query, entity), fields, sort, collation),
return executeFindOneInternal(
new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation),
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName);
}
protected <T> Mono<T> doFindAndModify(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields, Document sort,
protected <T> Mono<T> doFindAndModify(String collectionName, Document query, Document fields, Document sort,
Class<T> entityClass, UpdateDefinition update, FindAndModifyOptions options) {
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
@@ -2353,7 +2335,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}
return executeFindOneInternal(
new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate,
new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate,
update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options),
new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName);
});
@@ -2363,7 +2345,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* Customize this part for findAndReplace.
*
* @param collectionName The name of the collection to perform the operation in.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param mappedQuery the query to look up documents.
* @param mappedFields the fields to project the result to.
* @param mappedSort the sort to be applied when executing the query.
@@ -2376,22 +2357,20 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}.
* @since 2.1
*/
protected <T> Mono<T> doFindAndReplace(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document mappedQuery, Document mappedFields,
protected <T> Mono<T> doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
Document mappedSort, com.mongodb.client.model.Collation collation, Class<?> entityType, Document replacement,
FindAndReplaceOptions options, Class<T> resultType) {
EntityProjection<T, ?> projection = operations.introspectProjection(resultType, entityType);
return doFindAndReplace(collectionName, collectionPreparer, mappedQuery, mappedFields, mappedSort, collation,
entityType, replacement, options, projection);
return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, collation, entityType, replacement,
options, projection);
}
/**
* Customize this part for findAndReplace.
*
* @param collectionName The name of the collection to perform the operation in.
* @param collectionPreparer the preparer to prepare the collection for the actual use.
* @param mappedQuery the query to look up documents.
* @param mappedFields the fields to project the result to.
* @param mappedSort the sort to be applied when executing the query.
@@ -2404,8 +2383,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}.
* @since 3.4
*/
private <T> Mono<T> doFindAndReplace(String collectionName,
CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document mappedQuery, Document mappedFields,
private <T> Mono<T> doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
Document mappedSort, com.mongodb.client.model.Collation collation, Class<?> entityType, Document replacement,
FindAndReplaceOptions options, EntityProjection<T, ?> projection) {
@@ -2419,8 +2397,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
serializeToJsonSafely(replacement), collectionName));
}
return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields,
mappedSort, replacement, collation, options),
return executeFindOneInternal(
new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options),
new ProjectingReadCallback<>(this.mongoConverter, projection, collectionName), collectionName);
});
@@ -2498,12 +2476,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @param collection
*/
protected MongoCollection<Document> prepareCollection(MongoCollection<Document> collection) {
if (this.readPreference != null && this.readPreference != collection.getReadPreference()) {
return collection.withReadPreference(readPreference);
}
return collection;
return this.readPreference != null ? collection.withReadPreference(readPreference) : collection;
}
/**
@@ -2673,14 +2646,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
private static class FindOneCallback implements ReactiveCollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Optional<Document> fields;
private final FindPublisherPreparer preparer;
FindOneCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
@Nullable Document fields, FindPublisherPreparer preparer) {
this.collectionPreparer = collectionPreparer;
FindOneCallback(Document query, @Nullable Document fields, FindPublisherPreparer preparer) {
this.query = query;
this.fields = Optional.ofNullable(fields);
this.preparer = preparer;
@@ -2697,8 +2667,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()));
}
FindPublisher<Document> publisher = preparer.initiateFind(collectionPreparer.prepare(collection),
col -> col.find(query, Document.class));
FindPublisher<Document> publisher = preparer.initiateFind(collection, col -> col.find(query, Document.class));
if (fields.isPresent()) {
publisher = publisher.projection(fields.get());
@@ -2716,17 +2685,15 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
private static class FindCallback implements ReactiveCollectionQueryCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final @Nullable Document query;
private final @Nullable Document fields;
FindCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, @Nullable Document query) {
this(collectionPreparer, query, null);
FindCallback(@Nullable Document query) {
this(query, null);
}
FindCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query, Document fields) {
this.collectionPreparer = collectionPreparer;
FindCallback(Document query, Document fields) {
this.query = query;
this.fields = fields;
}
@@ -2734,12 +2701,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
@Override
public FindPublisher<Document> doInCollection(MongoCollection<Document> collection) {
MongoCollection<Document> collectionToUse = collectionPreparer.prepare(collection);
FindPublisher<Document> findPublisher;
if (ObjectUtils.isEmpty(query)) {
findPublisher = collectionToUse.find(Document.class);
findPublisher = collection.find(Document.class);
} else {
findPublisher = collectionToUse.find(query, Document.class);
findPublisher = collection.find(query, Document.class);
}
if (ObjectUtils.isEmpty(fields)) {
@@ -2758,15 +2724,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
private static class FindAndRemoveCallback implements ReactiveCollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
private final Optional<Collation> collation;
FindAndRemoveCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, @Nullable Collation collation) {
this.collectionPreparer = collectionPreparer;
FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -2780,7 +2744,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort);
collation.map(Collation::toMongoCollation).ifPresent(findOneAndDeleteOptions::collation);
return collectionPreparer.prepare(collection).findOneAndDelete(query, findOneAndDeleteOptions);
return collection.findOneAndDelete(query, findOneAndDeleteOptions);
}
}
@@ -2789,7 +2753,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
private static class FindAndModifyCallback implements ReactiveCollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
@@ -2797,10 +2760,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
private final List<Document> arrayFilters;
private final FindAndModifyOptions options;
FindAndModifyCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, Object update, List<Document> arrayFilters, FindAndModifyOptions options) {
FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List<Document> arrayFilters,
FindAndModifyOptions options) {
this.collectionPreparer = collectionPreparer;
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -2813,22 +2775,21 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
public Publisher<Document> doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
MongoCollection<Document> collectionToUse = collectionPreparer.prepare(collection);
if (options.isRemove()) {
FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort);
findOneAndDeleteOptions = options.getCollation().map(Collation::toMongoCollation)
.map(findOneAndDeleteOptions::collation).orElse(findOneAndDeleteOptions);
return collectionToUse.findOneAndDelete(query, findOneAndDeleteOptions);
return collection.findOneAndDelete(query, findOneAndDeleteOptions);
}
FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort,
arrayFilters);
if (update instanceof Document) {
return collectionToUse.findOneAndUpdate(query, (Document) update, findOneAndUpdateOptions);
return collection.findOneAndUpdate(query, (Document) update, findOneAndUpdateOptions);
} else if (update instanceof List) {
return collectionToUse.findOneAndUpdate(query, (List<Document>) update, findOneAndUpdateOptions);
return collection.findOneAndUpdate(query, (List<Document>) update, findOneAndUpdateOptions);
}
return Flux
@@ -2867,7 +2828,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
private static class FindAndReplaceCallback implements ReactiveCollectionCallback<Document> {
private final CollectionPreparer<MongoCollection<Document>> collectionPreparer;
private final Document query;
private final Document fields;
private final Document sort;
@@ -2875,10 +2835,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
private final @Nullable com.mongodb.client.model.Collation collation;
private final FindAndReplaceOptions options;
FindAndReplaceCallback(CollectionPreparer<MongoCollection<Document>> collectionPreparer, Document query,
Document fields, Document sort, Document update, com.mongodb.client.model.Collation collation,
FindAndReplaceOptions options) {
this.collectionPreparer = collectionPreparer;
FindAndReplaceCallback(Document query, Document fields, Document sort, Document update,
com.mongodb.client.model.Collation collation, FindAndReplaceOptions options) {
this.query = query;
this.fields = fields;
this.sort = sort;
@@ -2892,7 +2851,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
throws MongoException, DataAccessException {
FindOneAndReplaceOptions findOneAndReplaceOptions = convertToFindOneAndReplaceOptions(options, fields, sort);
return collectionPreparer.prepare(collection).findOneAndReplace(query, update, findOneAndReplaceOptions);
return collection.findOneAndReplace(query, update, findOneAndReplaceOptions);
}
private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplaceOptions options, Document fields,
@@ -3158,6 +3117,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return findPublisherToUse;
}
@Override
public ReadPreference getReadPreference() {
return query.getMeta().getFlags().contains(CursorOption.SECONDARY_READS) ? ReadPreference.primaryPreferred()
: null;
}
}
class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer {

View File

@@ -1,46 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.lang.Nullable;
import com.mongodb.ReadConcern;
/**
* Interface to be implemented by any object that wishes to expose the {@link ReadConcern}.
* <p>
* Typically implemented by cursor or query preparer objects.
*
* @author Mark Paluch
* @since 4.1
* @see org.springframework.data.mongodb.core.query.Query
* @see org.springframework.data.mongodb.core.aggregation.AggregationOptions
*/
public interface ReadConcernAware {
/**
* @return {@literal true} if a {@link ReadConcern} is set.
*/
default boolean hasReadConcern() {
return getReadConcern() != null;
}
/**
* @return the {@link ReadConcern} to apply or {@literal null} if none set.
*/
@Nullable
ReadConcern getReadConcern();
}

View File

@@ -27,8 +27,6 @@ import com.mongodb.ReadPreference;
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.2
* @see org.springframework.data.mongodb.core.query.Query
* @see org.springframework.data.mongodb.core.aggregation.AggregationOptions
*/
public interface ReadPreferenceAware {

View File

@@ -28,7 +28,6 @@ import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddF
import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder;
import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder;
@@ -51,7 +50,6 @@ import org.springframework.util.Assert;
* @author Nikolay Bogdanov
* @author Gustavo de Geus
* @author Jérôme Guyon
* @author Sangyong Choi
* @since 1.3
*/
public class Aggregation {
@@ -666,23 +664,6 @@ public class Aggregation {
return new LookupOperation(from, localField, foreignField, as);
}
/**
* Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API.
* <pre class="code">
* Aggregation.lookup().from("restaurants")
* .localField("restaurant_name")
* .foreignField("name")
* .let(newVariable("orders_drink").forField("drink"))
* .pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
* .as("matches")
* </pre>
* @return new instance of {@link LookupOperationBuilder}.
* @since 4.1
*/
public static LookupOperationBuilder lookup() {
return new LookupOperationBuilder();
}
/**
* Creates a new {@link CountOperationBuilder}.
*

View File

@@ -19,16 +19,11 @@ import java.time.Duration;
import java.util.Optional;
import org.bson.Document;
import org.springframework.data.mongodb.core.ReadConcernAware;
import org.springframework.data.mongodb.core.ReadPreferenceAware;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
/**
* Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support
* aggregation options can be found in the MongoDB reference documentation
@@ -44,7 +39,7 @@ import com.mongodb.ReadPreference;
* @see TypedAggregation#withOptions(AggregationOptions)
* @since 1.6
*/
public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware {
public class AggregationOptions {
private static final String BATCH_SIZE = "batchSize";
private static final String CURSOR = "cursor";
@@ -61,10 +56,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
private final Optional<Collation> collation;
private final Optional<String> comment;
private final Optional<Object> hint;
private Optional<ReadConcern> readConcern;
private Optional<ReadPreference> readPreference;
private Duration maxTime = Duration.ZERO;
private ResultOptions resultOptions = ResultOptions.READ;
private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED;
@@ -132,8 +123,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
this.collation = Optional.ofNullable(collation);
this.comment = Optional.ofNullable(comment);
this.hint = Optional.ofNullable(hint);
this.readConcern = Optional.empty();
this.readPreference = Optional.empty();
}
/**
@@ -279,26 +268,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
return hint;
}
@Override
public boolean hasReadConcern() {
return readConcern.isPresent();
}
@Override
public ReadConcern getReadConcern() {
return readConcern.orElse(null);
}
@Override
public boolean hasReadPreference() {
	// An empty Optional means no explicit ReadPreference was configured for this aggregation.
	return readPreference.isPresent();
}
@Override
public ReadPreference getReadPreference() {
	// Returns null rather than throwing when unset; callers are expected to check hasReadPreference() first.
	return readPreference.orElse(null);
}
/**
* @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior.
* @since 3.0
@@ -416,8 +385,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
private @Nullable Collation collation;
private @Nullable String comment;
private @Nullable Object hint;
private @Nullable ReadConcern readConcern;
private @Nullable ReadPreference readPreference;
private @Nullable Duration maxTime;
private @Nullable ResultOptions resultOptions;
private @Nullable DomainTypeMapping domainTypeMapping;
@@ -523,32 +490,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
return this;
}
/**
 * Define a {@link ReadConcern} to apply to the aggregation, taking precedence over any default
 * configured elsewhere.
 *
 * @param readConcern can be {@literal null} to indicate no explicit read concern.
 * @return this.
 * @since 4.1
 */
public Builder readConcern(@Nullable ReadConcern readConcern) {
	this.readConcern = readConcern;
	return this;
}
/**
 * Define a {@link ReadPreference} to apply to the aggregation, taking precedence over any default
 * configured elsewhere.
 *
 * @param readPreference can be {@literal null} to indicate no explicit read preference.
 * @return this.
 * @since 4.1
 */
public Builder readPreference(@Nullable ReadPreference readPreference) {
	this.readPreference = readPreference;
	return this;
}
/**
* Set the time limit for processing.
*
@@ -632,12 +573,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
if (domainTypeMapping != null) {
options.domainTypeMapping = domainTypeMapping;
}
if (readConcern != null) {
options.readConcern = Optional.of(readConcern);
}
if (readPreference != null) {
options.readPreference = Optional.of(readPreference);
}
return options;
}

View File

@@ -15,44 +15,28 @@
*/
package org.springframework.data.mongodb.core.aggregation;
import java.util.function.Supplier;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the builder provided via
* {@link #newLookup()} instead of creating instances of this class directly.
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method
* {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly.
*
* @author Alessio Fachechi
* @author Christoph Strobl
* @author Mark Paluch
* @author Sangyong Choi
* @since 1.9
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/lookup/">MongoDB Aggregation Framework:
* $lookup</a>
*/
public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {
private final String from;
@Nullable //
private final Field from;
private final Field localField;
@Nullable //
private final Field foreignField;
@Nullable //
private final Let let;
@Nullable //
private final AggregationPipeline pipeline;
private final ExposedField as;
/**
@@ -64,55 +48,16 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
* @param as must not be {@literal null}.
*/
public LookupOperation(Field from, Field localField, Field foreignField, Field as) {
this(((Supplier<String>) () -> {
Assert.notNull(from, "From must not be null");
return from.getTarget();
}).get(), localField, foreignField, null, null, as);
}
/**
* Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline
* pipeline}.
*
* @param from must not be {@literal null}.
* @param let must not be {@literal null}.
* @param as must not be {@literal null}.
* @since 4.1
*/
public LookupOperation(String from, @Nullable Let let, AggregationPipeline pipeline, Field as) {
this(from, null, null, let, pipeline, as);
}
/**
* Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline
* pipeline}.
*
* @param from must not be {@literal null}.
* @param localField can be {@literal null} if {@literal pipeline} is present.
* @param foreignField can be {@literal null} if {@literal pipeline} is present.
* @param let can be {@literal null} if {@literal localField} and {@literal foreignField} are present.
* @param as must not be {@literal null}.
* @since 4.1
*/
public LookupOperation(String from, @Nullable Field localField, @Nullable Field foreignField, @Nullable Let let,
@Nullable AggregationPipeline pipeline, Field as) {
Assert.notNull(from, "From must not be null");
if (pipeline == null) {
Assert.notNull(localField, "LocalField must not be null");
Assert.notNull(foreignField, "ForeignField must not be null");
} else if (localField == null && foreignField == null) {
Assert.notNull(pipeline, "Pipeline must not be null");
}
Assert.notNull(localField, "LocalField must not be null");
Assert.notNull(foreignField, "ForeignField must not be null");
Assert.notNull(as, "As must not be null");
this.from = from;
this.localField = localField;
this.foreignField = foreignField;
this.as = new ExposedField(as, true);
this.let = let;
this.pipeline = pipeline;
}
@Override
@@ -125,20 +70,9 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
Document lookupObject = new Document();
lookupObject.append("from", from);
if (localField != null) {
lookupObject.append("localField", localField.getTarget());
}
if (foreignField != null) {
lookupObject.append("foreignField", foreignField.getTarget());
}
if (let != null) {
lookupObject.append("let", let.toDocument(context).get("$let", Document.class).get("vars"));
}
if (pipeline != null) {
lookupObject.append("pipeline", pipeline.toDocuments(context));
}
lookupObject.append("from", from.getTarget());
lookupObject.append("localField", localField.getTarget());
lookupObject.append("foreignField", foreignField.getTarget());
lookupObject.append("as", as.getTarget());
return new Document(getOperator(), lookupObject);
@@ -167,7 +101,7 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
LocalFieldBuilder from(String name);
}
public static interface LocalFieldBuilder extends PipelineBuilder {
public static interface LocalFieldBuilder {
/**
* @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or
@@ -186,67 +120,7 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
AsBuilder foreignField(String name);
}
/**
 * Builder stage for defining the {@code let} variables of a {@code $lookup} that uses a pipeline.
 *
 * @since 4.1
 * @author Christoph Strobl
 */
public interface LetBuilder {

	/**
	 * Specifies {@link Let#getVariableNames() variables} that can be used in the
	 * {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}.
	 *
	 * @param let must not be {@literal null}.
	 * @return never {@literal null}.
	 * @see PipelineBuilder
	 */
	PipelineBuilder let(Let let);

	/**
	 * Specifies {@link Let#getVariableNames() variables} that can be used in the
	 * {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}.
	 *
	 * @param variables must not be {@literal null}.
	 * @return never {@literal null}.
	 * @see PipelineBuilder
	 */
	default PipelineBuilder let(ExpressionVariable... variables) {
		// Convenience overload: wraps the variables in a Let without an "in" expression.
		return let(Let.just(variables));
	}
}
/**
 * Builder stage for defining the {@code pipeline} of a {@code $lookup} stage. Also allows defining
 * {@code let} variables first via the inherited {@link LetBuilder} contract.
 *
 * @since 4.1
 * @author Christoph Strobl
 */
public interface PipelineBuilder extends LetBuilder {

	/**
	 * Specifies the {@link AggregationPipeline pipeline} that determines the resulting documents.
	 *
	 * @param pipeline must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	AsBuilder pipeline(AggregationPipeline pipeline);

	/**
	 * Specifies the {@link AggregationPipeline#getOperations() stages} that determine the resulting documents.
	 *
	 * @param stages must not be {@literal null} can be empty.
	 * @return never {@literal null}.
	 */
	default AsBuilder pipeline(AggregationOperation... stages) {
		// Convenience overload delegating to the AggregationPipeline variant.
		return pipeline(AggregationPipeline.of(stages));
	}

	/**
	 * @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
	 * @return new instance of {@link LookupOperation}.
	 */
	LookupOperation as(String name);
}
public static interface AsBuilder extends PipelineBuilder {
public static interface AsBuilder {
/**
* @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
@@ -264,12 +138,10 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
public static final class LookupOperationBuilder
implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder {
private @Nullable String from;
private @Nullable Field from;
private @Nullable Field localField;
private @Nullable Field foreignField;
private @Nullable ExposedField as;
private @Nullable Let let;
private @Nullable AggregationPipeline pipeline;
/**
* Creates new builder for {@link LookupOperation}.
@@ -284,10 +156,18 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
public LocalFieldBuilder from(String name) {
Assert.hasText(name, "'From' must not be null or empty");
from = name;
from = Fields.field(name);
return this;
}
@Override
public LookupOperation as(String name) {
Assert.hasText(name, "'As' must not be null or empty");
as = new ExposedField(Fields.field(name), true);
return new LookupOperation(from, localField, foreignField, as);
}
@Override
public AsBuilder foreignField(String name) {
@@ -303,29 +183,5 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
localField = Fields.field(name);
return this;
}
@Override
public PipelineBuilder let(Let let) {

	// Captures the $lookup "let" variables for use by the (optional) pipeline stages.
	Assert.notNull(let, "Let must not be null");
	this.let = let;
	return this;
}
@Override
public AsBuilder pipeline(AggregationPipeline pipeline) {

	// Captures the $lookup sub-pipeline that computes the joined documents.
	Assert.notNull(pipeline, "Pipeline must not be null");
	this.pipeline = pipeline;
	return this;
}
@Override
public LookupOperation as(String name) {

	// Terminal builder step: fixes the output array field name and materializes the LookupOperation
	// from whatever combination of localField/foreignField or let/pipeline was configured.
	Assert.hasText(name, "'As' must not be null or empty");
	as = new ExposedField(Fields.field(name), true);
	return new LookupOperation(from, localField, foreignField, let, pipeline, as);
}
}
}

View File

@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.aggregation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -223,41 +224,28 @@ public class VariableOperators {
public static class Let implements AggregationExpression {
private final List<ExpressionVariable> vars;
@Nullable //
private final AggregationExpression expression;
private Let(List<ExpressionVariable> vars, @Nullable AggregationExpression expression) {
private Let(List<ExpressionVariable> vars, AggregationExpression expression) {
this.vars = vars;
this.expression = expression;
}
/**
* Create a new {@link Let} holding just the given {@literal variables}.
*
* @param variables must not be {@literal null}.
* @return new instance of {@link Let}.
* @since 4.1
*/
public static Let just(ExpressionVariable... variables) {
return new Let(List.of(variables), null);
}
/**
* Start creating new {@link Let} by defining the variables for {@code $vars}.
*
* @param variables must not be {@literal null}.
* @return
*/
public static LetBuilder define(Collection<ExpressionVariable> variables) {
public static LetBuilder define(final Collection<ExpressionVariable> variables) {
Assert.notNull(variables, "Variables must not be null");
return new LetBuilder() {
@Override
public Let andApply(AggregationExpression expression) {
public Let andApply(final AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new Let(new ArrayList<ExpressionVariable>(variables), expression);
@@ -271,10 +259,19 @@ public class VariableOperators {
* @param variables must not be {@literal null}.
* @return
*/
public static LetBuilder define(ExpressionVariable... variables) {
public static LetBuilder define(final ExpressionVariable... variables) {
Assert.notNull(variables, "Variables must not be null");
return define(List.of(variables));
return new LetBuilder() {
@Override
public Let andApply(final AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new Let(Arrays.asList(variables), expression);
}
};
}
public interface LetBuilder {
@@ -286,11 +283,10 @@ public class VariableOperators {
* @return
*/
Let andApply(AggregationExpression expression);
}
@Override
public Document toDocument(AggregationOperationContext context) {
public Document toDocument(final AggregationOperationContext context) {
return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context);
}
@@ -316,22 +312,16 @@ public class VariableOperators {
}
letExpression.put("vars", mappedVars);
if (expression != null) {
letExpression.put("in", getMappedIn(operationContext));
}
letExpression.put("in", getMappedIn(operationContext));
return new Document("$let", letExpression);
}
private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) {
if (var.expression instanceof AggregationExpression expression) {
return new Document(var.variableName, expression.toDocument(context));
}
if (var.expression instanceof Field field) {
return new Document(var.variableName, context.getReference(field).toString());
}
return new Document(var.variableName, var.expression);
return new Document(var.variableName,
var.expression instanceof AggregationExpression ? ((AggregationExpression) var.expression).toDocument(context)
: var.expression);
}
private Object getMappedIn(AggregationOperationContext context) {
@@ -383,10 +373,6 @@ public class VariableOperators {
return new ExpressionVariable(variableName, expression);
}
public ExpressionVariable forField(String fieldRef) {
return new ExpressionVariable(variableName, Fields.field(fieldRef));
}
/**
* Create a new {@link ExpressionVariable} with current name and given {@literal expressionObject}.
*

View File

@@ -868,9 +868,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
dbObjectAccessor.put(prop, null);
}
} else if (!conversions.isSimpleType(value.getClass())) {
writePropertyInternal(value, dbObjectAccessor, prop);
writePropertyInternal(value, dbObjectAccessor, prop, accessor);
} else {
writeSimpleInternal(value, bson, prop);
writeSimpleInternal(value, bson, prop, accessor);
}
}
}
@@ -887,11 +887,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
return;
}
writePropertyInternal(value, dbObjectAccessor, inverseProp);
writePropertyInternal(value, dbObjectAccessor, inverseProp, accessor);
}
@SuppressWarnings({ "unchecked" })
protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor accessor, MongoPersistentProperty prop) {
protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor accessor, MongoPersistentProperty prop, PersistentPropertyAccessor<?> persistentPropertyAccessor) {
if (obj == null) {
return;
@@ -902,7 +902,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
if (conversions.hasValueConverter(prop)) {
accessor.put(prop, conversions.getPropertyValueConversions().getValueConverter(prop).write(obj,
new MongoConversionContext(prop, this)));
new MongoConversionContext(new PropertyValueProvider<MongoPersistentProperty>() {
@Nullable
@Override
public <T> T getPropertyValue(MongoPersistentProperty property) {
return (T) persistentPropertyAccessor.getProperty(property);
}
}, prop, this)));
return;
}
@@ -1234,12 +1240,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
BsonUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class));
}
private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property) {
private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property, PersistentPropertyAccessor<?> persistentPropertyAccessor) {
DocumentAccessor accessor = new DocumentAccessor(bson);
if (conversions.hasValueConverter(property)) {
accessor.put(property, conversions.getPropertyValueConversions().getValueConverter(property).write(value,
new MongoConversionContext(property, this)));
new MongoConversionContext(new PropertyValueProvider<MongoPersistentProperty>() {
@Nullable
@Override
public <T> T getPropertyValue(MongoPersistentProperty property) {
return (T) persistentPropertyAccessor.getProperty(property);
}
}, property, this)));
return;
}
@@ -1892,7 +1904,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
CustomConversions conversions = context.getCustomConversions();
if (conversions.hasValueConverter(property)) {
return (T) conversions.getPropertyValueConversions().getValueConverter(property).read(value,
new MongoConversionContext(property, context.getSourceConverter()));
new MongoConversionContext(this, property, context.getSourceConverter()));
}
ConversionContext contextToUse = context.forProperty(property);

View File

@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.core.convert;
import org.bson.conversions.Bson;
import org.springframework.data.convert.ValueConversionContext;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.model.PropertyValueProvider;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;
@@ -29,11 +31,13 @@ import org.springframework.lang.Nullable;
*/
public class MongoConversionContext implements ValueConversionContext<MongoPersistentProperty> {
private final PropertyValueProvider accessor; // TODO: generics
private final MongoPersistentProperty persistentProperty;
private final MongoConverter mongoConverter;
public MongoConversionContext(MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) {
public MongoConversionContext(PropertyValueProvider<?> accessor, MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) {
this.accessor = accessor;
this.persistentProperty = persistentProperty;
this.mongoConverter = mongoConverter;
}
@@ -43,6 +47,10 @@ public class MongoConversionContext implements ValueConversionContext<MongoPersi
return persistentProperty;
}
public Object getValue(String propertyPath) {
return accessor.getPropertyValue(persistentProperty.getOwner().getRequiredPersistentProperty(propertyPath));
}
@Override
public <T> T write(@Nullable Object value, TypeInformation<T> target) {
return (T) mongoConverter.convertToMongoType(value, target);

View File

@@ -437,7 +437,7 @@ public class QueryMapper {
&& converter.getCustomConversions().hasValueConverter(documentField.getProperty())) {
return converter.getCustomConversions().getPropertyValueConversions()
.getValueConverter(documentField.getProperty())
.write(value, new MongoConversionContext(documentField.getProperty(), converter));
.write(value, new MongoConversionContext(null, documentField.getProperty(), converter));
}
if (documentField.isIdField() && !documentField.isAssociation()) {

View File

@@ -24,16 +24,11 @@ import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Metric;
import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.ReadConcernAware;
import org.springframework.data.mongodb.core.ReadPreferenceAware;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
/**
* Builder class to build near-queries. <br />
* MongoDB {@code $geoNear} operator allows usage of a {@literal GeoJSON Point} or legacy coordinate pair. Though
@@ -176,7 +171,7 @@ import com.mongodb.ReadPreference;
* @author Christoph Strobl
* @author Mark Paluch
*/
public final class NearQuery implements ReadConcernAware, ReadPreferenceAware {
public final class NearQuery {
private final Point point;
private @Nullable Query query;
@@ -186,8 +181,6 @@ public final class NearQuery implements ReadConcernAware, ReadPreferenceAware {
private boolean spherical;
private @Nullable Long limit;
private @Nullable Long skip;
private @Nullable ReadConcern readConcern;
private @Nullable ReadPreference readPreference;
/**
* Creates a new {@link NearQuery}.
@@ -562,74 +555,6 @@ public final class NearQuery implements ReadConcernAware, ReadPreferenceAware {
return query != null ? query.getCollation().orElse(null) : null;
}
/**
 * Configures the query to use the given {@link ReadConcern} unless the underlying {@link #query(Query)}
 * {@link Query#hasReadConcern() specifies} another one.
 *
 * @param readConcern must not be {@literal null}.
 * @return this.
 * @since 4.1
 */
public NearQuery withReadConcern(ReadConcern readConcern) {

	Assert.notNull(readConcern, "ReadConcern must not be null");
	this.readConcern = readConcern;
	return this;
}
/**
 * Configures the query to use the given {@link ReadPreference} unless the underlying {@link #query(Query)}
 * {@link Query#hasReadPreference() specifies} another one.
 *
 * @param readPreference must not be {@literal null}.
 * @return this.
 * @since 4.1
 */
public NearQuery withReadPreference(ReadPreference readPreference) {

	Assert.notNull(readPreference, "ReadPreference must not be null");
	this.readPreference = readPreference;
	return this;
}
/**
 * Get the {@link ReadConcern} to use. Will return the underlying {@link #query(Query) queries}
 * {@link Query#getReadConcern() ReadConcern} if present or the one defined on the {@link NearQuery#readConcern}
 * itself.
 *
 * @return can be {@literal null} if none set.
 * @since 4.1
 * @see ReadConcernAware
 */
@Nullable
@Override
public ReadConcern getReadConcern() {

	// The wrapped Query's explicit ReadConcern takes precedence over the one set on this NearQuery.
	if (query != null && query.hasReadConcern()) {
		return query.getReadConcern();
	}
	return readConcern;
}
/**
 * Get the {@link ReadPreference} to use. Will return the underlying {@link #query(Query) queries}
 * {@link Query#getReadPreference() ReadPreference} if present or the one defined on the
 * {@link NearQuery#readPreference} itself.
 *
 * @return can be {@literal null} if none set.
 * @since 4.1
 * @see ReadPreferenceAware
 */
@Nullable
@Override
public ReadPreference getReadPreference() {

	// The wrapped Query's explicit ReadPreference takes precedence over the one set on this NearQuery.
	if (query != null && query.hasReadPreference()) {
		return query.getReadPreference();
	}
	return readPreference;
}
/**
* Returns the {@link Document} built by the {@link NearQuery}.
*

View File

@@ -34,16 +34,10 @@ import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.core.ReadConcernAware;
import org.springframework.data.mongodb.core.ReadPreferenceAware;
import org.springframework.data.mongodb.core.query.Meta.CursorOption;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
/**
* MongoDB Query object representing criteria, projection, sorting and query hints.
*
@@ -54,7 +48,7 @@ import com.mongodb.ReadPreference;
* @author Mark Paluch
* @author Anton Barkan
*/
public class Query implements ReadConcernAware, ReadPreferenceAware {
public class Query {
private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES";
@@ -64,9 +58,6 @@ public class Query implements ReadConcernAware, ReadPreferenceAware {
private Sort sort = Sort.unsorted();
private long skip;
private int limit;
private @Nullable ReadConcern readConcern;
private @Nullable ReadPreference readPreference;
private @Nullable String hint;
private Meta meta = new Meta();
@@ -169,59 +160,6 @@ public class Query implements ReadConcernAware, ReadPreferenceAware {
return this;
}
/**
 * Configures the query to use the given {@link ReadConcern} when being executed.
 *
 * @param readConcern must not be {@literal null}.
 * @return this.
 * @since 3.1
 */
public Query withReadConcern(ReadConcern readConcern) {

	Assert.notNull(readConcern, "ReadConcern must not be null");
	this.readConcern = readConcern;
	return this;
}
/**
 * Configures the query to use the given {@link ReadPreference} when being executed.
 *
 * @param readPreference must not be {@literal null}.
 * @return this.
 * @since 4.1
 */
public Query withReadPreference(ReadPreference readPreference) {

	Assert.notNull(readPreference, "ReadPreference must not be null");
	this.readPreference = readPreference;
	return this;
}
@Override
public boolean hasReadConcern() {
	// Only reports an explicitly configured ReadConcern.
	return this.readConcern != null;
}
@Override
public ReadConcern getReadConcern() {
	// May be null when no explicit ReadConcern was set; see hasReadConcern().
	return this.readConcern;
}
@Override
public boolean hasReadPreference() {
	// The SECONDARY_READS cursor flag implies a read preference even when none was set explicitly.
	return this.readPreference != null || getMeta().getFlags().contains(CursorOption.SECONDARY_READS);
}
@Override
public ReadPreference getReadPreference() {

	// Without an explicit preference, fall back to primaryPreferred when the legacy
	// SECONDARY_READS cursor flag is present; otherwise null (driver default applies).
	if (readPreference == null) {
		return getMeta().getFlags().contains(CursorOption.SECONDARY_READS) ? ReadPreference.primaryPreferred() : null;
	}
	return this.readPreference;
}
/**
* Configures the query to use the given {@link Document hint} when being executed.
*

View File

@@ -64,7 +64,6 @@ class QueryUtils {
combinedSort.putAll((Document) invocation.proceed());
return combinedSort;
});
factory.setInterfaces(new Class[0]);
return (Query) factory.getProxy(query.getClass().getClassLoader());
}
@@ -114,7 +113,7 @@ class QueryUtils {
if(parameters.isEmpty()) {
return -1;
}
int i = 0;
for(Class<?> parameterType : parameters) {
if(ClassUtils.isAssignable(type, parameterType)) {

View File

@@ -108,7 +108,6 @@ import org.springframework.util.CollectionUtils;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoException;
import com.mongodb.MongoNamespace;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.ServerCursor;
@@ -121,7 +120,16 @@ import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.*;
import com.mongodb.client.model.CountOptions;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.FindOneAndDeleteOptions;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.MapReduceAction;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.TimeSeriesGranularity;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
@@ -174,7 +182,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
when(collection.estimatedDocumentCount(any())).thenReturn(1L);
when(collection.getNamespace()).thenReturn(new MongoNamespace("db.mock-collection"));
when(collection.aggregate(any(List.class), any())).thenReturn(aggregateIterable);
when(collection.withReadConcern(any())).thenReturn(collection);
when(collection.withReadPreference(any())).thenReturn(collection);
when(collection.replaceOne(any(), any(), any(ReplaceOptions.class))).thenReturn(updateResult);
when(collection.withWriteConcern(any())).thenReturn(collectionWithWriteConcern);
@@ -471,34 +478,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
verify(collection, never()).withReadPreference(any());
}
@Test // GH-4277
void aggregateShouldHonorOptionsReadConcernWhenSet() {

	AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build();

	template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);

	// The options' ReadConcern must be propagated to the driver collection.
	verify(collection).withReadConcern(ReadConcern.SNAPSHOT);
}
@Test // GH-4277
void aggregateShouldHonorOptionsReadPreferenceWhenSet() {

	AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build();

	template.aggregate(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1", Wrapper.class);

	// The options' ReadPreference must be propagated to the driver collection.
	verify(collection).withReadPreference(ReadPreference.secondary());
}
@Test // GH-4277
void aggregateStreamShouldHonorOptionsReadPreferenceWhenSet() {

	AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.secondary()).build();

	// Same expectation as the non-streaming variant, exercised through aggregateStream.
	template.aggregateStream(newAggregation(Aggregation.unwind("foo")).withOptions(options), "collection-1",
			Wrapper.class);

	verify(collection).withReadPreference(ReadPreference.secondary());
}
@Test // DATAMONGO-2153
void aggregateShouldHonorOptionsComment() {
@@ -579,28 +558,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
verify(collection).withReadPreference(eq(ReadPreference.secondary()));
}
@Test // GH-4277
void geoNearShouldHonorReadPreferenceFromQuery() {

	NearQuery query = NearQuery.near(new Point(1, 1));
	query.withReadPreference(ReadPreference.secondary());

	template.geoNear(query, Wrapper.class);

	// The NearQuery's ReadPreference must be applied to the driver collection.
	verify(collection).withReadPreference(eq(ReadPreference.secondary()));
}
@Test // GH-4277
void geoNearShouldHonorReadConcernFromQuery() {

	NearQuery query = NearQuery.near(new Point(1, 1));
	query.withReadConcern(ReadConcern.SNAPSHOT);

	template.geoNear(query, Wrapper.class);

	// The NearQuery's ReadConcern must be applied to the driver collection.
	verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT));
}
@Test // DATAMONGO-1166, DATAMONGO-2264
void geoNearShouldIgnoreReadPreferenceWhenNotSet() {
@@ -845,24 +802,6 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
verify(findIterable).batchSize(1234);
}
@Test // GH-4277
void findShouldUseReadConcernWhenPresent() {

	// A ReadConcern configured on the Query must be forwarded to the driver collection.
	template.find(new BasicQuery("{'foo' : 'bar'}").withReadConcern(ReadConcern.SNAPSHOT),
			AutogenerateableId.class);

	verify(collection).withReadConcern(ReadConcern.SNAPSHOT);
}
@Test // GH-4277
void findShouldUseReadPreferenceWhenPresent() {

	// A ReadPreference configured on the Query must be forwarded to the driver collection.
	template.find(new BasicQuery("{'foo' : 'bar'}").withReadPreference(ReadPreference.secondary()),
			AutogenerateableId.class);

	verify(collection).withReadPreference(ReadPreference.secondary());
}
@Test // DATAMONGO-1518
void executeQueryShouldUseCollationWhenPresent() {
@@ -1109,8 +1048,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
PersonProjection.class,
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonProjection.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document("firstname", 1)));
@@ -1119,8 +1057,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class,
PersonProjection.class,
template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, PersonProjection.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document("bar", 1)));
@@ -1129,8 +1066,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
PersonSpELProjection.class,
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
@@ -1139,8 +1075,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733, DATAMONGO-2041
void appliesFieldsToDtoProjection() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
Jedi.class,
template.doFind("star-wars", new Document(), new Document(), Person.class, Jedi.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document("firstname", 1)));
@@ -1149,8 +1084,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class,
Jedi.class,
template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, Jedi.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(new Document("bar", 1)));
@@ -1159,8 +1093,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void doesNotApplyFieldsWhenTargetIsNotAProjection() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
Person.class,
template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));
@@ -1169,8 +1102,7 @@ public class MongoTemplateUnitTests extends MongoOperationsUnitTests {
@Test // DATAMONGO-1733
void doesNotApplyFieldsWhenTargetExtendsDomainType() {
template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class,
PersonExtended.class,
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class,
CursorPreparer.NO_OP_PREPARER);
verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT));

View File

@@ -23,6 +23,8 @@ import static org.springframework.data.mongodb.test.util.Assertions.assertThat;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.data.mongodb.core.MongoTemplateUnitTests.Wrapper;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@@ -54,12 +56,12 @@ import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.data.annotation.Id;
import org.springframework.data.geo.Point;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
import org.springframework.data.mapping.context.MappingContext;
@@ -98,7 +100,6 @@ import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.util.CollectionUtils;
import com.mongodb.MongoClientSettings;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.client.model.CountOptions;
import com.mongodb.client.model.CreateCollectionOptions;
@@ -167,7 +168,6 @@ public class ReactiveMongoTemplateUnitTests {
when(db.runCommand(any(), any(Class.class))).thenReturn(runCommandPublisher);
when(db.createCollection(any(), any(CreateCollectionOptions.class))).thenReturn(runCommandPublisher);
when(collection.withReadPreference(any())).thenReturn(collection);
when(collection.withReadConcern(any())).thenReturn(collection);
when(collection.find(any(Class.class))).thenReturn(findPublisher);
when(collection.find(any(Document.class), any(Class.class))).thenReturn(findPublisher);
when(collection.aggregate(anyList())).thenReturn(aggregatePublisher);
@@ -385,33 +385,11 @@ public class ReactiveMongoTemplateUnitTests {
verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build()));
}
@Test // GH-4277
void geoNearShouldHonorReadPreferenceFromQuery() {
// NearQuery carries its own read preference; geoNear must apply it to the collection.
NearQuery query = NearQuery.near(new Point(1, 1));
query.withReadPreference(ReadPreference.secondary());
// subscribe() triggers execution of the reactive pipeline so the mock interaction happens.
template.geoNear(query, Wrapper.class).subscribe();
verify(collection).withReadPreference(eq(ReadPreference.secondary()));
}
@Test // GH-4277
void geoNearShouldHonorReadConcernFromQuery() {
// NearQuery carries its own read concern; geoNear must apply it to the collection.
NearQuery query = NearQuery.near(new Point(1, 1));
query.withReadConcern(ReadConcern.SNAPSHOT);
// subscribe() triggers execution of the reactive pipeline so the mock interaction happens.
template.geoNear(query, Wrapper.class).subscribe();
verify(collection).withReadConcern(eq(ReadConcern.SNAPSHOT));
}
@Test // DATAMONGO-1719
void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class,
PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonProjection.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher).projection(eq(new Document("firstname", 1)));
}
@@ -419,8 +397,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719
void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class,
PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, PersonProjection.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher).projection(eq(new Document("bar", 1)));
}
@@ -428,8 +406,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719
void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class,
PersonSpELProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher, never()).projection(any());
}
@@ -437,8 +415,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719, DATAMONGO-2041
void appliesFieldsToDtoProjection() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class,
Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document(), Person.class, Jedi.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher).projection(eq(new Document("firstname", 1)));
}
@@ -446,8 +424,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719
void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class,
Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document("bar", 1), Person.class, Jedi.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher).projection(eq(new Document("bar", 1)));
}
@@ -455,8 +433,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719
void doesNotApplyFieldsWhenTargetIsNotAProjection() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class,
Person.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher, never()).projection(any());
}
@@ -464,8 +442,8 @@ public class ReactiveMongoTemplateUnitTests {
@Test // DATAMONGO-1719
void doesNotApplyFieldsWhenTargetExtendsDomainType() {
template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class,
PersonExtended.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe();
template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class,
FindPublisherPreparer.NO_OP_PREPARER).subscribe();
verify(findPublisher, never()).projection(any());
}
@@ -654,26 +632,6 @@ public class ReactiveMongoTemplateUnitTests {
verify(aggregatePublisher).collation(eq(com.mongodb.client.model.Collation.builder().locale("de_AT").build()));
}
@Test // GH-4277
// NOTE(review): method name has a typo ("aggreate" -> "aggregate"); left as-is to match sibling tests.
void aggreateShouldUseReadConcern() {
// Read concern configured via AggregationOptions must reach the driver collection.
AggregationOptions options = AggregationOptions.builder().readConcern(ReadConcern.SNAPSHOT).build();
template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class,
Document.class).subscribe();
verify(collection).withReadConcern(ReadConcern.SNAPSHOT);
}
@Test // GH-4286
void aggreateShouldUseReadReadPreference() {
AggregationOptions options = AggregationOptions.builder().readPreference(ReadPreference.primaryPreferred()).build();
template.aggregate(newAggregation(Sith.class, project("id")).withOptions(options), AutogenerateableId.class,
Document.class).subscribe();
verify(collection).withReadPreference(ReadPreference.primaryPreferred());
}
@Test // DATAMONGO-1854
void aggreateShouldUseCollationFromOptionsEvenIfDefaultCollationIsPresent() {

View File

@@ -43,6 +43,7 @@ import java.util.stream.Stream;
import org.assertj.core.data.Offset;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -60,7 +61,6 @@ import org.springframework.data.mongodb.core.TestEntities;
import org.springframework.data.mongodb.core.Venue;
import org.springframework.data.mongodb.core.aggregation.AggregationTests.CarDescriptor.Entry;
import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.Granularities;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
@@ -90,7 +90,6 @@ import com.mongodb.client.MongoCollection;
* @author Maninder Singh
* @author Sergey Shcherbakov
* @author Minsu Kim
* @author Sangyong Choi
*/
@ExtendWith(MongoTemplateExtension.class)
public class AggregationTests {
@@ -500,7 +499,7 @@ public class AggregationTests {
/*
//complex mongodb aggregation framework example from
https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state
db.zipcodes.aggregate(
{
$group: {
@@ -1519,47 +1518,8 @@ public class AggregationTests {
assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1");
}
@Test // GH-3322
// $lookup with a pipeline stage requires MongoDB 5.0+ together with localField/foreignField.
@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0")
void shouldLookupPeopleCorrectlyWithPipeline() {
createUsersWithReferencedPersons();
// Join "person" into "user" documents, filtering the joined side via the pipeline's $match.
TypedAggregation<User> agg = newAggregation(User.class, //
lookup().from("person").localField("_id").foreignField("firstname").pipeline(match(where("firstname").is("u1"))).as("linkedPerson"), //
sort(ASC, "id"));
AggregationResults<Document> results = mongoTemplate.aggregate(agg, User.class, Document.class);
List<Document> mappedResults = results.getMappedResults();
// First result: user "u1" with its matching person embedded under "linkedPerson".
Document firstItem = mappedResults.get(0);
assertThat(firstItem).containsEntry("_id", "u1");
assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1");
}
@Test // GH-3322
@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0")
void shouldLookupPeopleCorrectlyWithPipelineAndLet() {
createUsersWithReferencedPersons();
TypedAggregation<User> agg = newAggregation(User.class, //
lookup().from("person").localField("_id").foreignField("firstname").let(Let.ExpressionVariable.newVariable("the_id").forField("_id")).pipeline(
match(ctx -> new Document("$expr", new Document("$eq", List.of("$$the_id", "u1"))))).as("linkedPerson"),
sort(ASC, "id"));
AggregationResults<Document> results = mongoTemplate.aggregate(agg, User.class, Document.class);
List<Document> mappedResults = results.getMappedResults();
Document firstItem = mappedResults.get(0);
assertThat(firstItem).containsEntry("_id", "u1");
assertThat(firstItem).containsEntry("linkedPerson.[0].firstname", "u1");
}
@Test // DATAMONGO-1326
void shouldGroupByAndLookupPeopleCorrectly() {
void shouldGroupByAndLookupPeopleCorectly() {
createUsersWithReferencedPersons();

View File

@@ -16,16 +16,11 @@
package org.springframework.data.mongodb.core.aggregation;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable.*;
import static org.springframework.data.mongodb.test.util.Assertions.assertThat;
import java.util.List;
import org.bson.Document;
import org.junit.jupiter.api.Test;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.mongodb.core.query.Criteria;
/**
* Unit tests for {@link LookupOperation}.
@@ -67,7 +62,7 @@ public class LookupOperationUnitTests {
Document lookupClause = extractDocumentFromLookupOperation(lookupOperation);
org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") //
assertThat(lookupClause).containsEntry("from", "a") //
.containsEntry("localField", "b") //
.containsEntry("foreignField", "c") //
.containsEntry("as", "d");
@@ -119,7 +114,7 @@ public class LookupOperationUnitTests {
Document lookupClause = extractDocumentFromLookupOperation(lookupOperation);
org.assertj.core.api.Assertions.assertThat(lookupClause).containsEntry("from", "a") //
assertThat(lookupClause).containsEntry("from", "a") //
.containsEntry("localField", "b") //
.containsEntry("foreignField", "c") //
.containsEntry("as", "d");
@@ -134,86 +129,4 @@ public class LookupOperationUnitTests {
assertThat(lookupOperation.getFields().exposesSingleFieldOnly()).isTrue();
assertThat(lookupOperation.getFields().getField("d")).isNotNull();
}
@Test // GH-3322
void buildsLookupWithLetAndPipeline() {
// Builds the correlated-subquery form of $lookup: "let" binds outer fields,
// the pipeline's $expr references them via the $$ variable syntax.
LookupOperation lookupOperation = LookupOperation.newLookup().from("warehouses")
.let(newVariable("order_item").forField("item"), newVariable("order_qty").forField("ordered"))
.pipeline(match(ctx -> new Document("$expr",
new Document("$and", List.of(Document.parse("{ $eq: [ \"$stock_item\", \"$$order_item\" ] }"),
Document.parse("{ $gte: [ \"$instock\", \"$$order_qty\" ] }"))))))
.as("stockdata");
// Expected rendering matches the canonical example from the MongoDB $lookup docs.
assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("""
{ $lookup: {
from: "warehouses",
let: { order_item: "$item", order_qty: "$ordered" },
pipeline: [
{ $match:
{ $expr:
{ $and:
[
{ $eq: [ "$stock_item", "$$order_item" ] },
{ $gte: [ "$instock", "$$order_qty" ] }
]
}
}
}
],
as: "stockdata"
}}
""");
}
@Test // GH-3322
void buildsLookupWithJustPipeline() {
LookupOperation lookupOperation = LookupOperation.newLookup().from("holidays") //
.pipeline( //
match(Criteria.where("year").is(2018)), //
project().andExclude("_id").and(ctx -> new Document("name", "$name").append("date", "$date")).as("date"), //
Aggregation.replaceRoot("date") //
).as("holidays");
assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("""
{ $lookup:
{
from: "holidays",
pipeline: [
{ $match: { year: 2018 } },
{ $project: { _id: 0, date: { name: "$name", date: "$date" } } },
{ $replaceRoot: { newRoot: "$date" } }
],
as: "holidays"
}
}}
""");
}
@Test // GH-3322
void buildsLookupWithLocalAndForeignFieldAsWellAsLetAndPipeline() {
LookupOperation lookupOperation = Aggregation.lookup().from("restaurants") //
.localField("restaurant_name")
.foreignField("name")
.let(newVariable("orders_drink").forField("drink")) //
.pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
.as("matches");
assertThat(lookupOperation.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("""
{ $lookup: {
from: "restaurants",
localField: "restaurant_name",
foreignField: "name",
let: { orders_drink: "$drink" },
pipeline: [{
$match: {
$expr: { $in: [ "$$orders_drink", "$beverages" ] }
}
}],
as: "matches"
}}
""");
}
}

View File

@@ -2613,7 +2613,7 @@ class MappingMongoConverterUnitTests {
doReturn(Person.class).when(persistentProperty).getType();
doReturn(Person.class).when(persistentProperty).getRawType();
converter.writePropertyInternal(sourceValue, accessor, persistentProperty);
converter.writePropertyInternal(sourceValue, accessor, persistentProperty, null);
assertThat(accessor.getDocument())
.isEqualTo(new org.bson.Document("pName", new org.bson.Document("_id", id.toString())));

View File

@@ -21,6 +21,9 @@ import static org.springframework.data.mongodb.core.messaging.SubscriptionUtils.
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;
import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -39,10 +42,8 @@ import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junitpioneer.jupiter.RepeatFailedTest;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.CollectionOptions;
@@ -61,7 +62,7 @@ import org.springframework.data.mongodb.test.util.Template;
import com.mongodb.client.model.changestream.ChangeStreamDocument;
import com.mongodb.client.model.changestream.FullDocument;
import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
import org.junitpioneer.jupiter.RepeatFailedTest;
/**
* Integration test for subscribing to a {@link com.mongodb.operation.ChangeStreamBatchCursor} inside the
@@ -701,7 +702,7 @@ class ChangeStreamTests {
}
@Test // GH-4187
@Disabled("Flakey test failing occasionally due to timing issues")
@EnableIfMongoServerVersion(isLessThan = "6.0")
void readsFullDocumentBeforeChangeWhenOptionDeclaredRequiredAndMongoVersionIsLessThan6() throws InterruptedException {
CollectingMessageListener<ChangeStreamDocument<Document>, User> messageListener = new CollectingMessageListener<>();

View File

@@ -29,10 +29,6 @@ import org.springframework.data.geo.Metrics;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.DocumentTestUtils;
import org.springframework.data.mongodb.core.geo.GeoJsonPoint;
import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
/**
* Unit tests for {@link NearQuery}.
@@ -233,58 +229,4 @@ public class NearQueryUnitTests {
assertThat(query.toDocument()).containsEntry("maxDistance", 1000D).containsEntry("distanceMultiplier", 0.00062137D);
}
@Test // GH-4277
void fetchesReadPreferenceFromUnderlyingQueryObject() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0))
.query(new Query().withReadPreference(ReadPreference.nearest()));
assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest());
}
@Test // GH-4277
void fetchesReadConcernFromUnderlyingQueryObject() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0)).query(new Query().withReadConcern(ReadConcern.SNAPSHOT));
assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT);
}
@Test // GH-4277
void usesReadPreferenceFromNearQueryIfUnderlyingQueryDoesNotDefineAny() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest())
.query(new Query());
assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadPreference()).isNull();
assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.nearest());
}
@Test // GH-4277
void usesReadConcernFromNearQueryIfUnderlyingQueryDoesNotDefineAny() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT).query(new Query());
assertThat(((Query) ReflectionTestUtils.getField(nearQuery, "query")).getReadConcern()).isNull();
assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.SNAPSHOT);
}
@Test // GH-4277
void readPreferenceFromUnderlyingQueryOverridesNearQueryOne() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadPreference(ReadPreference.nearest())
.query(new Query().withReadPreference(ReadPreference.primary()));
assertThat(nearQuery.getReadPreference()).isEqualTo(ReadPreference.primary());
}
@Test // GH-4277
void readConcernFromUnderlyingQueryOverridesNearQueryOne() {
NearQuery nearQuery = NearQuery.near(new Point(0, 0)).withReadConcern(ReadConcern.SNAPSHOT)
.query(new Query().withReadConcern(ReadConcern.MAJORITY));
assertThat(nearQuery.getReadConcern()).isEqualTo(ReadConcern.MAJORITY);
}
}

View File

@@ -342,10 +342,7 @@ class QueryTests {
source.limit(10);
source.setSortObject(new Document("_id", 1));
ProxyFactory proxyFactory = new ProxyFactory(source);
proxyFactory.setInterfaces(new Class[0]);
Query target = Query.of((Query) proxyFactory.getProxy());
Query target = Query.of((Query) new ProxyFactory(source).getProxy());
compareQueries(target, source);
}

View File

@@ -0,0 +1,528 @@
/*
* Copyright 2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.fle;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.security.SecureRandom;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.bson.BsonArray;
import org.bson.BsonBinary;
import org.bson.BsonDocument;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.types.Binary;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.CollectionFactory;
import org.springframework.core.annotation.AliasFor;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.convert.ValueConverter;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.convert.MongoValueConverter;
import org.springframework.data.mongodb.core.mapping.Encrypted;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.fle.FLETests.Config;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import com.mongodb.ClientEncryptionSettings;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoNamespace;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import com.mongodb.client.model.vault.DataKeyOptions;
import com.mongodb.client.model.vault.EncryptOptions;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.vault.ClientEncryption;
import com.mongodb.client.vault.ClientEncryptions;
/**
* @author Christoph Strobl
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = Config.class)
public class FLETests {
@Autowired MongoTemplate template;
@Test
// End-to-end check of explicit (manual) client-side field level encryption:
// save an entity whose annotated fields get encrypted, assert the raw BSON holds
// Binary ciphertext, then verify querying works only for deterministic fields.
void manualEnAndDecryption() {
Person person = new Person();
person.id = "id-1";
person.name = "p1-name";
person.ssn = "mySecretSSN"; // deterministic encryption (queryable)
person.wallet = "myEvenMoreSecretStuff"; // random encryption (non queryable)
// nested full document encryption
person.address = new Address();
person.address.city = "NYC";
person.address.street = "4th Ave.";
// only the zip field inside this nested document is encrypted
person.encryptedZip = new AddressWithEncryptedZip();
person.encryptedZip.city = "Boston";
person.encryptedZip.street = "central square";
person.encryptedZip.zip = "1234567890";
// collections (simple and complex element types) are encrypted as a whole
person.listOfString = Arrays.asList("spring", "data", "mongodb");
Address partOfList = new Address();
partOfList.city = "SFO";
partOfList.street = "---";
person.listOfComplex = Collections.singletonList(partOfList);
template.save(person);
System.out.println("source: " + person);
// read the raw stored document, bypassing the converter, to inspect ciphertext
Document savedDocument = template.execute(Person.class, collection -> {
return collection.find(new Document()).first();
});
// ssn should look like "ssn": {"$binary": {"base64": "...
System.out.println("saved: " + savedDocument.toJson());
assertThat(savedDocument.get("ssn")).isInstanceOf(Binary.class);
assertThat(savedDocument.get("wallet")).isInstanceOf(Binary.class);
// encryptedZip stays a Document, only its zip entry is ciphertext
assertThat(savedDocument.get("encryptedZip")).isInstanceOf(Document.class);
assertThat(savedDocument.get("encryptedZip", Document.class).get("zip")).isInstanceOf(Binary.class);
// fully-encrypted nested document / collections collapse to a single Binary
assertThat(savedDocument.get("address")).isInstanceOf(Binary.class);
assertThat(savedDocument.get("listOfString")).isInstanceOf(Binary.class);
assertThat(savedDocument.get("listOfComplex")).isInstanceOf(Binary.class);
// count should be 1 using a deterministic algorithm
long queryCount = template.query(Person.class).matching(where("ssn").is(person.ssn)).count();
System.out.println("query(count): " + queryCount);
assertThat(queryCount).isOne();
// deterministic ciphertext is stable, so equality queries find the document
Person bySsn = template.query(Person.class).matching(where("ssn").is(person.ssn)).firstValue();
System.out.println("queryable: " + bySsn);
assertThat(bySsn).isEqualTo(person);
// randomly-encrypted fields produce different ciphertext each time -> no match
Person byWallet = template.query(Person.class).matching(where("wallet").is(person.wallet)).firstValue();
System.out.println("not-queryable: " + byWallet);
assertThat(byWallet).isNull();
}
@Test
// Verifies that values written through an Update (not only through save) are
// run through the encrypting converter and stored as Binary ciphertext.
void theUpdateStuff() {
Person person = new Person();
person.id = "id-1";
person.name = "p1-name";
template.save(person);
// raw read to show the document before the update
Document savedDocument = template.execute(Person.class, collection -> {
return collection.find(new Document()).first();
});
System.out.println("saved: " + savedDocument.toJson());
// $set an encrypted field via the fluent update API
template.update(Person.class).matching(where("id").is(person.id)).apply(Update.update("ssn", "secret-value")).first();
savedDocument = template.execute(Person.class, collection -> {
return collection.find(new Document()).first();
});
System.out.println("updated: " + savedDocument.toJson());
// the updated ssn must have been encrypted on the way out
assertThat(savedDocument.get("ssn")).isInstanceOf(Binary.class);
}
@Test
// Exercises key-alt-name resolution where the data key is looked up via the value of
// another field ("/name"). Deleting one user's key must make decryption of that user's
// data fail while the other user's data stays readable.
void altKeyDetection(@Autowired ClientEncryption clientEncryption) throws InterruptedException {
// two data keys, addressable by alt names matching Person.name values
BsonBinary user1key = clientEncryption.createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-1")));
BsonBinary user2key = clientEncryption.createDataKey("local",
new DataKeyOptions().keyAltNames(Collections.singletonList("user-2")));
Person p1 = new Person();
p1.id = "id-1";
p1.name = "user-1";
p1.ssn = "ssn";
p1.viaAltKeyNameField = "value-1";
Person p2 = new Person();
p2.id = "id-2";
p2.name = "user-2";
p2.viaAltKeyNameField = "value-1";
Person p3 = new Person();
p3.id = "id-3";
p3.name = "user-1";
p3.viaAltKeyNameField = "value-1";
template.save(p1);
template.save(p2);
template.save(p3);
template.execute(Person.class, collection -> {
collection.find(new Document()).forEach(it -> System.out.println(it.toJson()));
return null;
});
// System.out.println(template.query(Person.class).matching(where("id").is(p1.id)).firstValue());
// System.out.println(template.query(Person.class).matching(where("id").is(p2.id)).firstValue());
// remove user-2's key so its ciphertext becomes undecryptable
DeleteResult deleteResult = clientEncryption.deleteKey(user2key);
clientEncryption.getKeys().forEach(System.out::println);
System.out.println("deleteResult: " + deleteResult);
// the driver caches data keys (default cache period ~60s); wait until the
// deleted key is evicted so decryption actually has to re-fetch it
System.out.println("---- waiting for cache timeout ----");
TimeUnit.SECONDS.sleep(90);
// p1 still decrypts via user-1's key; p2's decryption must now be rejected
assertThat(template.query(Person.class).matching(where("id").is(p1.id)).firstValue()).isEqualTo(p1);
assertThatExceptionOfType(PermissionDeniedDataAccessException.class)
.isThrownBy(() -> template.query(Person.class).matching(where("id").is(p2.id)).firstValue());
}
@Configuration
static class Config extends AbstractMongoClientConfiguration {

	@Autowired ApplicationContext applicationContext;

	@Override
	protected String getDatabaseName() {
		return "fle-test";
	}

	@Bean
	public MongoClient mongoClient() {
		return super.mongoClient();
	}

	/**
	 * Register the bean-factory-aware converter factory so the {@link EncryptingConverter}
	 * bean is picked up for properties annotated with {@code @ValueConverter}.
	 */
	@Override
	protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
		converterConfigurationAdapter
				.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
	}

	@Bean
	EncryptingConverter encryptingConverter(ClientEncryption clientEncryption) {
		return new EncryptingConverter(clientEncryption);
	}

	/**
	 * Sets up a local-KMS {@link ClientEncryption}: generates a throwaway master key,
	 * (re)creates the key vault collection with a unique index on {@code keyAltNames},
	 * and clears the test data collection.
	 * <p>
	 * NOTE: the master key is random per context, so previously stored ciphertext from
	 * earlier runs cannot be decrypted — hence the collection drop below.
	 */
	@Bean
	ClientEncryption clientEncryption(MongoClient mongoClient) {

		final byte[] localMasterKey = new byte[96];
		new SecureRandom().nextBytes(localMasterKey);

		// plain maps instead of double-brace initialization (the anonymous inner classes
		// created by double-brace init retain a reference to the enclosing Config)
		Map<String, Object> localKey = new HashMap<>();
		localKey.put("key", localMasterKey);
		Map<String, Map<String, Object>> kmsProviders = new HashMap<>();
		kmsProviders.put("local", localKey);

		MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault");
		MongoCollection<Document> keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName())
				.getCollection(keyVaultNamespace.getCollectionName());
		keyVaultCollection.drop();
		// Ensure that two data keys cannot share the same keyAltName.
		keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"),
				new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames")));

		MongoCollection<Document> collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test");
		collection.drop(); // Clear old data

		// Create the ClientEncryption instance
		ClientEncryptionSettings clientEncryptionSettings = ClientEncryptionSettings.builder()
				.keyVaultMongoClientSettings(
						MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build())
				.keyVaultNamespace(keyVaultNamespace.getFullName()).kmsProviders(kmsProviders).build();
		return ClientEncryptions.create(clientEncryptionSettings);
	}
}
/**
 * Test fixture covering the supported encrypted property arrangements: simple values, alt-key lookup,
 * nested entities, and collections of simple and complex types.
 */
@Data
@org.springframework.data.mongodb.core.mapping.Document("test")
static class Person {

	String id;
	String name;

	// deterministic algorithm keeps equality queries against the encrypted value possible
	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) //
	String ssn;

	// encrypted with the data key registered under the "mySuperSecretKey" alt name
	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, altKeyName = "mySuperSecretKey") //
	String wallet;

	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // full document must be random
	Address address;

	// not annotated itself; only its zip field is encrypted (see AddressWithEncryptedZip)
	AddressWithEncryptedZip encryptedZip;

	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random
	List<String> listOfString;

	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) // lists must be random
	List<Address> listOfComplex;

	// leading "/" resolves the alt key name from the value of this document's "name" field at conversion time
	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random, altKeyName = "/name") //
	String viaAltKeyNameField;
}
/** Plain (unencrypted) nested document used as the entity element type of encrypted properties. */
@Data
static class Address {
	String city;
	String street;
}
/** An {@link Address} whose zip code — and only the zip code — is stored encrypted. */
@Getter
@Setter
static class AddressWithEncryptedZip extends Address {

	@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Random) String zip;

	@Override
	public String toString() {
		// produces the same representation as the previous concatenation-based version
		return String.format("AddressWithEncryptedZip{zip='%s', city='%s', street='%s'}", zip, getCity(), getStreet());
	}
}
/**
 * Marks a field for explicit client-side encryption through {@link EncryptingConverter}. Meta-annotated with
 * {@link Encrypted} (algorithm alias) and {@link ValueConverter} so the mapping layer routes values through the
 * converter bean.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
@Encrypted
@ValueConverter(EncryptingConverter.class)
@interface EncryptedField {

	// aliased into Encrypted.algorithm, e.g. AEAD_AES_256_CBC_HMAC_SHA_512_Random
	@AliasFor(annotation = Encrypted.class, value = "algorithm")
	String algorithm() default "";

	// data key alt name; a leading "/" reads the alt key name from another field of the same document
	String altKeyName() default "";
}
/**
 * {@link MongoValueConverter} that encrypts on write and decrypts on read using explicit (manual) client-side
 * encryption. Creates a default data key with the "mySuperSecretKey" alt name on construction.
 */
static class EncryptingConverter implements MongoValueConverter<Object, Object> {

	private final ClientEncryption clientEncryption;
	private final BsonBinary dataKeyId; // should be provided from outside.

	public EncryptingConverter(ClientEncryption clientEncryption) {
		this.clientEncryption = clientEncryption;
		this.dataKeyId = clientEncryption.createDataKey("local",
				new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey")));
	}

	@Nullable
	@Override
	public Object read(Object value, MongoConversionContext context) {

		Object decrypted = buildEncryptionContext(context).decrypt(value, clientEncryption);
		if (decrypted instanceof BsonValue bsonValue) {
			return BsonUtils.toJavaType(bsonValue);
		}
		return decrypted;
	}

	@Nullable
	@Override
	public BsonBinary write(Object value, MongoConversionContext context) {
		return buildEncryptionContext(context).encrypt(value, clientEncryption);
	}

	ManualEncryptionContext buildEncryptionContext(MongoConversionContext context) {
		return new ManualEncryptionContext(context, dataKeyId);
	}
}
/**
 * Per-conversion helper carrying the property being converted plus the fallback data key. Resolves the
 * {@link EncryptOptions} (algorithm, key id or alt name) and maps property values to/from BSON for explicit
 * encryption.
 */
static class ManualEncryptionContext {

	MongoConversionContext context;
	MongoPersistentProperty persistentProperty;
	BsonBinary dataKeyId; // fallback data key used when no altKeyName is configured
	Lazy<Encrypted> encryption;

	public ManualEncryptionContext(MongoConversionContext context, BsonBinary dataKeyId) {
		this.context = context;
		this.persistentProperty = context.getProperty();
		this.dataKeyId = dataKeyId;
		// NOTE(review): encryption.get() below NPEs if the property lacks @Encrypted — acceptable for this test setup
		this.encryption = Lazy.of(() -> persistentProperty.findAnnotation(Encrypted.class));
	}

	/**
	 * Encrypts the given raw value using the algorithm from {@link Encrypted} and either the configured alt key name
	 * (optionally read from a sibling field when prefixed with {@code /}) or the default data key.
	 */
	BsonBinary encrypt(Object value, ClientEncryption clientEncryption) {

		// TODO: check - encryption.get().keyId()
		EncryptOptions encryptOptions = new EncryptOptions(encryption.get().algorithm());

		EncryptedField annotation = persistentProperty.findAnnotation(EncryptedField.class);
		if (annotation != null && !annotation.altKeyName().isBlank()) {
			if (annotation.altKeyName().startsWith("/")) {
				// "/field" style: use the value of another field of the same document as key alt name
				String fieldName = annotation.altKeyName().replace("/", "");
				Object altKeyNameValue = context.getValue(fieldName);
				encryptOptions = encryptOptions.keyAltName(altKeyNameValue.toString());
			} else {
				encryptOptions = encryptOptions.keyAltName(annotation.altKeyName());
			}
		} else {
			encryptOptions = encryptOptions.keyId(this.dataKeyId);
		}

		System.out.println(
				"encrypting with: " + (StringUtils.hasText(encryptOptions.getKeyAltName()) ? encryptOptions.getKeyAltName()
						: encryptOptions.getKeyId()));

		// collections (of simple values or of entities) are turned into a BsonArray first
		if (persistentProperty.isCollectionLike()) {
			return clientEncryption.encrypt(collectionLikeToBsonValue(value), encryptOptions);
		}
		if (!persistentProperty.isEntity()) {
			return clientEncryption.encrypt(BsonUtils.simpleToBsonValue(value), encryptOptions);
		}

		Object write = context.write(value);
		if (write instanceof Document doc) {
			return clientEncryption.encrypt(doc.toBsonDocument(), encryptOptions);
		}
		return clientEncryption.encrypt(BsonUtils.simpleToBsonValue(write), encryptOptions);
	}

	/**
	 * Converts a collection/array property value into a {@link BsonArray}; entities are written through the conversion
	 * context, simple values are mapped directly. Returns {@literal null} for non collection-like properties.
	 * Dead no-op branches from the previous revision removed.
	 */
	@Nullable
	public BsonValue collectionLikeToBsonValue(Object value) {

		if (!persistentProperty.isCollectionLike()) {
			return null;
		}

		BsonArray bsonArray = new BsonArray();
		boolean entity = persistentProperty.isEntity();

		if (value instanceof Collection values) {
			values.forEach(it -> bsonArray.add(toBsonElement(it, entity)));
		} else if (ObjectUtils.isArray(value)) {
			for (Object o : ObjectUtils.toObjectArray(value)) {
				bsonArray.add(toBsonElement(o, entity));
			}
		}
		return bsonArray;
	}

	/**
	 * Maps a single collection element to BSON.
	 * Fix: the previous revision passed the constant {@code 0} instead of the array element to
	 * {@code context.write(...)} in the array branch, corrupting encrypted arrays of entities.
	 */
	private BsonValue toBsonElement(Object element, boolean entity) {
		if (!entity) {
			return BsonUtils.simpleToBsonValue(element);
		}
		Document write = (Document) context.write(element, persistentProperty.getTypeInformation());
		return write.toBsonDocument();
	}

	/**
	 * Decrypts the given database value. {@link Binary}/{@link BsonBinary} values are decrypted through the
	 * {@link ClientEncryption}; other values (e.g. when the driver auto-decrypted already) pass through untouched.
	 */
	public Object decrypt(Object value, ClientEncryption clientEncryption) {

		// this was a hack to avoid the 60 sec timeout of the key cache
		// ClientEncryptionSettings settings = (ClientEncryptionSettings) new DirectFieldAccessor(clientEncryption)
		// .getPropertyValue("options");
		// clientEncryption = ClientEncryptions.create(settings);

		Object result = value;
		if (value instanceof Binary binary) {
			result = clientEncryption.decrypt(new BsonBinary(binary.getType(), binary.getData()));
		}
		if (value instanceof BsonBinary binary) {
			result = clientEncryption.decrypt(binary);
		}

		// in case the driver has auto decryption (aka .bypassAutoEncryption(true)) active
		// https://github.com/mongodb/mongo-java-driver/blob/master/driver-sync/src/examples/tour/ClientSideEncryptionExplicitEncryptionOnlyTour.java
		if (value == result) {
			return result;
		}

		if (persistentProperty.isCollectionLike() && result instanceof Iterable<?> iterable) {
			Collection<Object> collection = CollectionFactory.createCollection(persistentProperty.getType(), 10);
			if (!persistentProperty.isEntity()) {
				iterable.forEach(it -> collection.add(BsonUtils.toJavaType((BsonValue) it)));
			} else {
				iterable.forEach(it -> collection
						.add(context.read(BsonUtils.toJavaType((BsonValue) it), persistentProperty.getActualType())));
			}
			return collection;
		}

		if (!persistentProperty.isEntity() && result instanceof BsonValue bsonValue) {
			return BsonUtils.toJavaType(bsonValue);
		}
		if (persistentProperty.isEntity() && result instanceof BsonDocument bsonDocument) {
			return context.read(BsonUtils.toJavaType(bsonDocument), persistentProperty.getTypeInformation());
		}
		return result;
	}
}
}

View File

@@ -0,0 +1,440 @@
/*
* Copyright 2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.fle;
import static org.assertj.core.api.Assertions.*;
import static org.springframework.data.mongodb.core.EncryptionAlgorithms.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import lombok.Data;
import org.springframework.data.mongodb.fle.FLETests.Person;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.security.SecureRandom;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.function.Supplier;
import org.bson.BsonArray;
import org.bson.BsonBinary;
import org.bson.BsonDocument;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.types.Binary;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.reactivestreams.Publisher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.CollectionFactory;
import org.springframework.core.annotation.AliasFor;
import org.springframework.data.convert.PropertyValueConverterFactory;
import org.springframework.data.convert.ValueConverter;
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.convert.MongoConversionContext;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.convert.MongoValueConverter;
import org.springframework.data.mongodb.core.mapping.Encrypted;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.fle.FLETests.Config;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
import com.mongodb.ClientEncryptionSettings;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoNamespace;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import com.mongodb.client.model.vault.DataKeyOptions;
import com.mongodb.client.model.vault.EncryptOptions;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoCollection;
import com.mongodb.reactivestreams.client.vault.ClientEncryption;
import com.mongodb.reactivestreams.client.vault.ClientEncryptions;
/**
* @author Christoph Strobl
* @since 2022/11
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = ReactiveFLETests.Config.class)
public class ReactiveFLETests {

	@Autowired ReactiveMongoTemplate template;

	@Test // resolveValues must flatten reactive values, including ones nested in sub documents
	void xxx() {

		Document source = new Document("name", "value").append("mono", Mono.fromSupplier(() -> "from mono"))
				.append("nested", new Document("n1", Mono.fromSupplier(() -> "from nested mono")));

		resolveValues(Mono.just(source)) //
				.as(StepVerifier::create).consumeNextWith(resolved -> {
					assertThat(resolved).isEqualTo(Document
							.parse("{\"name\": \"value\", \"mono\": \"from mono\", \"nested\" : { \"n1\" : \"from nested mono\"}}"));
				}).verifyComplete();
	}

	/**
	 * Recursively replaces {@link Mono} values held inside the given {@link Document} (or documents nested within it)
	 * with their emitted values.
	 * <p>
	 * Fix: the previous revision returned right after resolving the first nested {@link Document}, silently skipping
	 * publishers in entries that followed it; resolution now restarts after each replacement until no unresolved
	 * publisher remains. NOTE(review): a Mono that itself emits a Document holding further publishers is not descended
	 * into — confirm whether that case can occur.
	 */
	private Mono<Document> resolveValues(Mono<Document> document) {

		return document.flatMap(source -> {
			for (Entry<String, Object> entry : source.entrySet()) {
				if (entry.getValue() instanceof Mono<?> valueMono) {
					return valueMono.flatMap(value -> {
						source.put(entry.getKey(), value); // replacing an existing key keeps the iterator valid
						return resolveValues(Mono.just(source));
					});
				}
				if (entry.getValue() instanceof Document nested && requiresResolution(nested)) {
					return resolveValues(Mono.just(nested)).flatMap(resolved -> {
						source.put(entry.getKey(), resolved);
						return resolveValues(Mono.just(source));
					});
				}
			}
			return Mono.just(source); // nothing left to resolve
		});
	}

	/**
	 * @return {@literal true} if the document (transitively) holds at least one unresolved {@link Mono}. Guards the
	 *         recursion above against restarting on already fully resolved sub documents.
	 */
	private static boolean requiresResolution(Document document) {
		return document.values().stream()
				.anyMatch(value -> value instanceof Mono<?> || (value instanceof Document nested && requiresResolution(nested)));
	}

	@Test // store a person; ssn must be encrypted at rest but transparently decrypted on read
	void manualEnAndDecryption() {

		Person person = new Person();
		person.id = "id-1";
		person.name = "p1-name";
		person.ssn = "mySecretSSN";

		template.save(person).block();

		System.out.println("source: " + person);

		Flux<Document> result = template.execute(FLETests.Person.class, collection -> {
			return Mono.from(collection.find(new Document()).first());
		});
		System.out.println("encrypted: " + result.blockFirst().toJson());

		Person id = template.query(Person.class).matching(where("id").is(person.id)).first().block();
		System.out.println("decrypted: " + id);
	}

	/** Test fixture with a deterministically encrypted ssn so equality queries stay possible. */
	@Data
	@org.springframework.data.mongodb.core.mapping.Document("test")
	static class Person {

		String id;
		String name;

		@EncryptedField(algorithm = AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic) //
		String ssn;
	}

	/**
	 * Reactive counterpart of {@link FLETests.Config}: reactive {@link MongoClient}, reactive
	 * {@link ClientEncryption}, and the {@link ReactiveEncryptingConverter}.
	 */
	@Configuration
	static class Config extends AbstractReactiveMongoConfiguration {

		@Autowired ApplicationContext applicationContext;

		@Override
		protected String getDatabaseName() {
			return "fle-test";
		}

		@Override
		protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
			// bean-factory-aware factory so @ValueConverter(ReactiveEncryptingConverter.class) resolves the bean below
			converterConfigurationAdapter
					.registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext));
		}

		@Bean
		@Override
		public MongoClient reactiveMongoClient() {
			return super.reactiveMongoClient();
		}

		@Bean
		ReactiveEncryptingConverter encryptingConverter(ClientEncryption clientEncryption) {
			return new ReactiveEncryptingConverter(clientEncryption);
		}

		/**
		 * Builds a reactive {@link ClientEncryption} backed by the "local" KMS provider.
		 * NOTE(review): the 96-byte master key is regenerated on every context start, so ciphertext from a previous run
		 * cannot be decrypted — fine for tests only.
		 */
		@Bean
		ClientEncryption clientEncryption(MongoClient mongoClient) {

			final byte[] localMasterKey = new byte[96];
			new SecureRandom().nextBytes(localMasterKey);
			Map<String, Map<String, Object>> kmsProviders = new HashMap<String, Map<String, Object>>() {
				{
					put("local", new HashMap<String, Object>() {
						{
							put("key", localMasterKey);
						}
					});
				}
			};

			MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault");
			MongoCollection<Document> keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName())
					.getCollection(keyVaultNamespace.getCollectionName());
			Mono.from(keyVaultCollection.drop()).block();

			// Ensure that two data keys cannot share the same keyAltName.
			// Fix: reactive driver publishers are lazy — without subscribing, the index was never actually created.
			Mono.from(keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"),
					new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames")))).block();

			MongoCollection<Document> collection = mongoClient.getDatabase(getDatabaseName()).getCollection("test");
			Mono.from(collection.drop()).block(); // Clear old data

			// Create the ClientEncryption instance
			ClientEncryptionSettings clientEncryptionSettings = ClientEncryptionSettings.builder()
					.keyVaultMongoClientSettings(
							MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build())
					.keyVaultNamespace(keyVaultNamespace.getFullName()).kmsProviders(kmsProviders).build();

			ClientEncryption clientEncryption = ClientEncryptions.create(clientEncryptionSettings);
			return clientEncryption;
		}
	}

	/**
	 * Marks a field for explicit client-side encryption through {@link ReactiveEncryptingConverter}. A leading
	 * {@code /} in {@code altKeyName} reads the alt key name from another field of the same document.
	 */
	@Retention(RetentionPolicy.RUNTIME)
	@Target(ElementType.FIELD)
	@Encrypted
	@ValueConverter(ReactiveEncryptingConverter.class)
	@interface EncryptedField {

		@AliasFor(annotation = Encrypted.class, value = "algorithm")
		String algorithm() default "";

		String altKeyName() default "";
	}

	/**
	 * Reactive {@link MongoValueConverter}: encrypts to a {@link Publisher} on write, decrypts (currently blocking) on
	 * read.
	 */
	static class ReactiveEncryptingConverter implements MongoValueConverter<Object, Object> {

		private final ClientEncryption clientEncryption;
		private final BsonBinary dataKeyId; // should be provided from outside.

		public ReactiveEncryptingConverter(ClientEncryption clientEncryption) {
			this.clientEncryption = clientEncryption;
			this.dataKeyId = Mono.from(clientEncryption.createDataKey("local",
					new DataKeyOptions().keyAltNames(Collections.singletonList("mySuperSecretKey")))).block();
		}

		@Nullable
		@Override
		public Object read(Object value, MongoConversionContext context) {

			ManualEncryptionContext encryptionContext = buildEncryptionContext(context);
			Object decrypted;
			try {
				decrypted = encryptionContext.decrypt(value, clientEncryption);
			} catch (InterruptedException e) {
				// Fix: restore the interrupt flag instead of swallowing it before translating the exception
				Thread.currentThread().interrupt();
				throw new RuntimeException(e);
			} catch (ExecutionException e) {
				throw new RuntimeException(e);
			}
			return decrypted instanceof BsonValue ? BsonUtils.toJavaType((BsonValue) decrypted) : decrypted;
		}

		@Nullable
		@Override
		public Publisher<BsonBinary> write(Object value, MongoConversionContext context) {
			ManualEncryptionContext encryptionContext = buildEncryptionContext(context);
			return encryptionContext.encrypt(value, clientEncryption);
		}

		ManualEncryptionContext buildEncryptionContext(MongoConversionContext context) {
			return new ManualEncryptionContext(context, this.dataKeyId);
		}
	}

	/**
	 * Per-conversion helper carrying the property being converted plus the fallback data key; resolves
	 * {@link EncryptOptions} and maps property values to/from BSON for explicit encryption.
	 */
	static class ManualEncryptionContext {

		MongoConversionContext context;
		MongoPersistentProperty persistentProperty;
		BsonBinary dataKeyId; // fallback data key used when no altKeyName is configured
		Lazy<Encrypted> encryption;

		public ManualEncryptionContext(MongoConversionContext context, BsonBinary dataKeyId) {
			this.context = context;
			this.persistentProperty = context.getProperty();
			this.dataKeyId = dataKeyId;
			// NOTE(review): encryption.get() below NPEs if the property lacks @Encrypted — acceptable for this test setup
			this.encryption = Lazy.of(() -> persistentProperty.findAnnotation(Encrypted.class));
		}

		/**
		 * Encrypts the given raw value using the algorithm from {@link Encrypted} and either the configured alt key
		 * name (optionally read from a sibling field when prefixed with {@code /}) or the default data key.
		 */
		Publisher<BsonBinary> encrypt(Object value, ClientEncryption clientEncryption) {

			// TODO: check - encryption.get().keyId()
			EncryptOptions encryptOptions = new EncryptOptions(encryption.get().algorithm());

			EncryptedField annotation = persistentProperty.findAnnotation(EncryptedField.class);
			if (annotation != null && !annotation.altKeyName().isBlank()) {
				if (annotation.altKeyName().startsWith("/")) {
					// "/field" style: use the value of another field of the same document as key alt name
					String fieldName = annotation.altKeyName().replace("/", "");
					Object altKeyNameValue = context.getValue(fieldName);
					encryptOptions = encryptOptions.keyAltName(altKeyNameValue.toString());
				} else {
					encryptOptions = encryptOptions.keyAltName(annotation.altKeyName());
				}
			} else {
				encryptOptions = encryptOptions.keyId(this.dataKeyId);
			}

			System.out.println(
					"encrypting with: " + (StringUtils.hasText(encryptOptions.getKeyAltName()) ? encryptOptions.getKeyAltName()
							: encryptOptions.getKeyId()));

			// collections (of simple values or of entities) are turned into a BsonArray first
			if (persistentProperty.isCollectionLike()) {
				return clientEncryption.encrypt(collectionLikeToBsonValue(value), encryptOptions);
			}
			if (!persistentProperty.isEntity()) {
				return clientEncryption.encrypt(BsonUtils.simpleToBsonValue(value), encryptOptions);
			}

			Object write = context.write(value);
			if (write instanceof Document doc) {
				return clientEncryption.encrypt(doc.toBsonDocument(), encryptOptions);
			}
			return clientEncryption.encrypt(BsonUtils.simpleToBsonValue(write), encryptOptions);
		}

		/**
		 * Converts a collection/array property value into a {@link BsonArray}; entities are written through the
		 * conversion context, simple values are mapped directly. Returns {@literal null} for non collection-like
		 * properties. Dead no-op branches from the previous revision removed.
		 */
		@Nullable
		public BsonValue collectionLikeToBsonValue(Object value) {

			if (!persistentProperty.isCollectionLike()) {
				return null;
			}

			BsonArray bsonArray = new BsonArray();
			boolean entity = persistentProperty.isEntity();

			if (value instanceof Collection values) {
				values.forEach(it -> bsonArray.add(toBsonElement(it, entity)));
			} else if (ObjectUtils.isArray(value)) {
				for (Object o : ObjectUtils.toObjectArray(value)) {
					bsonArray.add(toBsonElement(o, entity));
				}
			}
			return bsonArray;
		}

		/** Maps a single collection element to BSON (entities via the conversion context). */
		private BsonValue toBsonElement(Object element, boolean entity) {
			if (!entity) {
				return BsonUtils.simpleToBsonValue(element);
			}
			Document write = (Document) context.write(element, persistentProperty.getTypeInformation());
			return write.toBsonDocument();
		}

		/**
		 * Decrypts the given database value. {@link Binary}/{@link BsonBinary} values are decrypted through the
		 * {@link ClientEncryption}; other values (e.g. when the driver auto-decrypted already) pass through untouched.
		 * NOTE(review): blocks on the reactive decrypt result via {@code toFuture().get()} — a stop-gap until the
		 * conversion path supports reactive types end to end.
		 */
		public Object decrypt(Object value, ClientEncryption clientEncryption)
				throws ExecutionException, InterruptedException {

			// this was a hack to avoid the 60 sec timeout of the key cache
			// ClientEncryptionSettings settings = (ClientEncryptionSettings) new DirectFieldAccessor(clientEncryption)
			// .getPropertyValue("options");
			// clientEncryption = ClientEncryptions.create(settings);

			Object r = value;
			if (value instanceof Binary binary) {
				r = clientEncryption.decrypt(new BsonBinary(binary.getType(), binary.getData()));
			}
			if (value instanceof BsonBinary binary) {
				r = clientEncryption.decrypt(binary);
			}

			// in case the driver has auto decryption (aka .bypassAutoEncryption(true)) active
			// https://github.com/mongodb/mongo-java-driver/blob/master/driver-sync/src/examples/tour/ClientSideEncryptionExplicitEncryptionOnlyTour.java
			if (value == r) {
				return r;
			}

			if (r instanceof Mono mono) {
				return mono.map(result -> {

					if (persistentProperty.isCollectionLike() && result instanceof Iterable<?> iterable) {
						Collection<Object> collection = CollectionFactory.createCollection(persistentProperty.getType(), 10);
						if (!persistentProperty.isEntity()) {
							iterable.forEach(it -> collection.add(BsonUtils.toJavaType((BsonValue) it)));
						} else {
							iterable.forEach(it -> collection
									.add(context.read(BsonUtils.toJavaType((BsonValue) it), persistentProperty.getActualType())));
						}
						return collection;
					}
					if (!persistentProperty.isEntity() && result instanceof BsonValue bsonValue) {
						return BsonUtils.toJavaType(bsonValue);
					}
					if (persistentProperty.isEntity() && result instanceof BsonDocument bsonDocument) {
						return context.read(BsonUtils.toJavaType(bsonDocument), persistentProperty.getTypeInformation());
					}
					return result;
				}).toFuture().get();
			}
			return r;
		}
	}
}

View File

@@ -28,6 +28,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
@@ -119,11 +120,7 @@ class SimpleReactiveMongoRepositoryUnitTests {
@Test // DATAMONGO-1854
void shouldAddDefaultCollationToFindOneForExampleIfPresent() {
when(entityInformation.getCollectionName()).thenReturn("testdummy");
doReturn(flux).when(mongoOperations).find(any(Query.class), eq(TestDummy.class), eq("testdummy"));
when(flux.buffer(anyInt())).thenReturn(flux);
when(flux.map(any())).thenReturn(flux);
when(flux.next()).thenReturn(mono);
when(mongoOperations.find(any(), any(), any())).thenReturn(flux);
Collation collation = Collation.of("en_US");

View File

@@ -1,4 +1,4 @@
Spring Data MongoDB 4.1 M1 (2023.0.0)
Spring Data MongoDB 4.0 GA (2022.0.0)
Copyright (c) [2010-2019] Pivotal Software, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -40,6 +40,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file.