Compare commits

..

11 Commits

Author SHA1 Message Date
Christoph Strobl
d94d273010 Fix test 2023-01-25 15:05:56 +01:00
Christoph Strobl
b9f6463337 decrypt? 2023-01-25 14:32:38 +01:00
Christoph Strobl
095022e71d reactive FLE encryption works -> next decrypt 2023-01-25 14:32:35 +01:00
Christoph Strobl
329b4b2881 Hacking - Reactive FLE
experiment with resolving reactive types in document
2023-01-25 14:24:35 +01:00
Christoph Strobl
73aeb7a425 Test encryption during update 2023-01-25 14:24:35 +01:00
Christoph Strobl
4b8ac4d249 Some changes that allow reading the alt key from a field
typically only supported in automatic schema but neat to have it here as well, e.g. for a customer-data cipher based on e.g. the username.
Also make sure to translate decryption exceptions.
2023-01-25 14:24:35 +01:00
Christoph Strobl
1a7157fa7c Encrypt collection of complex types. 2023-01-25 14:24:35 +01:00
Christoph Strobl
10a089fe77 Encrypt collection of simple values 2023-01-25 14:24:35 +01:00
Christoph Strobl
7b93379165 Enable full encryption of nested documents. 2023-01-25 14:24:35 +01:00
Christoph Strobl
0361c3acc9 Hacking 2023-01-25 14:24:35 +01:00
Christoph Strobl
a6641e0c01 Prepare issue branch. 2023-01-25 14:24:34 +01:00
354 changed files with 4007 additions and 20233 deletions

View File

@@ -1,2 +1,2 @@
#Mon Aug 14 08:53:22 EDT 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
#Fri Jun 03 09:32:40 CEST 2022
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.5/apache-maven-3.8.5-bin.zip

View File

@@ -16,7 +16,7 @@ All of these use cases are great reasons to essentially run what the CI server d
IMPORTANT: To do this you must have Docker installed on your machine.
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+

47
Jenkinsfile vendored
View File

@@ -77,29 +77,10 @@ pipeline {
}
}
}
stage('Publish JDK (Java 20) + MongoDB 6.0') {
when {
anyOf {
changeset "ci/openjdk20-mongodb-6.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
steps {
script {
def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk20-mongodb-6.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
}
}
stage("test: baseline (main)") {
stage("test: baseline (Java 17)") {
when {
beforeAgent(true)
anyOf {
@@ -138,7 +119,7 @@ pipeline {
}
parallel {
stage("test: MongoDB 5.0 (main)") {
stage("test: MongoDB 5.0 (Java 17)") {
agent {
label 'data'
}
@@ -160,7 +141,7 @@ pipeline {
}
}
stage("test: MongoDB 6.0 (main)") {
stage("test: MongoDB 6.0 (Java 17)") {
agent {
label 'data'
}
@@ -181,28 +162,6 @@ pipeline {
}
}
}
stage("test: MongoDB 6.0 (next)") {
agent {
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}
}
}

View File

@@ -10,7 +10,7 @@ All of these use cases are great reasons to essentially run what Concourse does
IMPORTANT: To do this you must have Docker installed on your machine.
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
@@ -23,7 +23,7 @@ Since the container is binding to your source, you can make edits from your IDE
If you need to test the `build.sh` script, do this:
1. `mkdir /tmp/spring-data-mongodb-artifactory`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
@@ -36,4 +36,4 @@ IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.
NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.

View File

@@ -1,24 +0,0 @@
ARG BASE
FROM ${BASE}
# Any ARG statements before FROM are cleared.
ARG MONGODB
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN set -eux; \
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
# MongoDB 6.0 release signing key
wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
# Needed when MongoDB creates a 6.0 folder.
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
echo ${TZ} > /etc/timezone
RUN apt-get update && \
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

View File

@@ -1,21 +1,19 @@
# Java versions
java.main.tag=17.0.8_7-jdk-focal
java.next.tag=20-jdk-jammy
java.main.tag=17.0.5_8-jdk-focal
# Docker container images - standard
docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
# Supported versions of MongoDB
docker.mongodb.4.4.version=4.4.23
docker.mongodb.5.0.version=5.0.19
docker.mongodb.6.0.version=6.0.8
docker.mongodb.4.4.version=4.4.17
docker.mongodb.5.0.version=5.0.13
docker.mongodb.6.0.version=6.0.2
# Supported versions of Redis
docker.redis.6.version=6.2.13
docker.redis.6.version=6.2.6
# Supported versions of Cassandra
docker.cassandra.3.version=3.11.15
docker.cassandra.3.version=3.11.14
# Docker environment settings
docker.java.inside.basic=-v $HOME:/tmp/jenkins-home

34
pom.xml
View File

@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4462-SNAPSHOT</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>3.2.0-SNAPSHOT</version>
<version>3.1.0-SNAPSHOT</version>
</parent>
<modules>
@@ -26,8 +26,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>3.2.0-SNAPSHOT</springdata.commons>
<mongo>4.10.2</mongo>
<springdata.commons>3.1.0-SNAPSHOT</springdata.commons>
<mongo>4.8.2</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>
@@ -145,19 +145,33 @@
<repositories>
<repository>
<id>spring-snapshot</id>
<url>https://repo.spring.io/snapshot</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
</repository>
<repository>
<id>spring-milestone</id>
<url>https://repo.spring.io/milestone</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</pluginRepository>
</pluginRepositories>
</project>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4462-SNAPSHOT</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@@ -96,14 +96,15 @@ class MongoResultsWriter implements ResultsWriter {
for (Object key : doc.keySet()) {
Object value = doc.get(key);
if (value instanceof Document document) {
value = fixDocumentKeys(document);
} else if (value instanceof BasicDBObject basicDBObject) {
value = fixDocumentKeys(new Document(basicDBObject));
if (value instanceof Document) {
value = fixDocumentKeys((Document) value);
} else if (value instanceof BasicDBObject) {
value = fixDocumentKeys(new Document((BasicDBObject) value));
}
if (key instanceof String newKey) {
if (key instanceof String) {
String newKey = (String) key;
if (newKey.contains(".")) {
newKey = newKey.replace('.', ',');
}

View File

@@ -15,18 +15,13 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4462-SNAPSHOT</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<properties>
<project.root>${basedir}/..</project.root>
<dist.key>SDMONGO</dist.key>
<!-- Observability -->
<micrometer-docs-generator.inputPath>${maven.multiModuleProjectDirectory}/spring-data-mongodb/</micrometer-docs-generator.inputPath>
<micrometer-docs-generator.inclusionPattern>.*</micrometer-docs-generator.inclusionPattern>
<micrometer-docs-generator.outputPath>${maven.multiModuleProjectDirectory}/target/</micrometer-docs-generator.outputPath>
</properties>
<build>
@@ -35,36 +30,6 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<executions>
<execution>
<id>generate-docs</id>
<phase>generate-resources</phase>
<goals>
<goal>java</goal>
</goals>
<configuration>
<mainClass>io.micrometer.docs.DocsGeneratorCommand</mainClass>
<includePluginDependencies>true</includePluginDependencies>
<arguments>
<argument>${micrometer-docs-generator.inputPath}</argument>
<argument>${micrometer-docs-generator.inclusionPattern}</argument>
<argument>${micrometer-docs-generator.outputPath}</argument>
</arguments>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-docs-generator</artifactId>
<version>1.0.1</version>
<type>jar</type>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
@@ -80,4 +45,15 @@
</build>
<pluginRepositories>
<pluginRepository>
<id>spring-plugins-release</id>
<url>https://repo.spring.io/plugins-release</url>
</pluginRepository>
<pluginRepository>
<id>spring-plugins-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</pluginRepository>
</pluginRepositories>
</project>

View File

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.2.x-4462-SNAPSHOT</version>
<version>4.1.x-MANUAL-ENCRYPTION-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -115,7 +115,7 @@
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-crypt</artifactId>
<version>1.8.0</version>
<version>1.6.1</version>
<optional>true</optional>
</dependency>

View File

@@ -16,13 +16,12 @@
package org.springframework.data.mongodb;
import org.springframework.dao.UncategorizedDataAccessException;
import org.springframework.lang.Nullable;
public class UncategorizedMongoDbException extends UncategorizedDataAccessException {
private static final long serialVersionUID = -2336595514062364929L;
public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) {
public UncategorizedMongoDbException(String msg, Throwable cause) {
super(msg, cause);
}
}

View File

@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.aot;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -26,6 +25,7 @@ import java.util.Set;
import org.springframework.aot.generate.GenerationContext;
import org.springframework.aot.hint.MemberCategory;
import org.springframework.aot.hint.TypeReference;
import org.springframework.core.ResolvableType;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.MergedAnnotations;
import org.springframework.data.annotation.Reference;
@@ -33,6 +33,7 @@ import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.util.TypeUtils;
/**
* @author Christoph Strobl
@@ -65,7 +66,9 @@ public class LazyLoadingProxyAotProcessor {
if (field.getType().isInterface()) {
List<Class<?>> interfaces = new ArrayList<>(
Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
TypeUtils.resolveTypesInSignature(ResolvableType.forField(field, type)));
interfaces.add(0, org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class);
interfaces.add(org.springframework.aop.SpringProxy.class);
interfaces.add(org.springframework.aop.framework.Advised.class);
interfaces.add(org.springframework.core.DecoratingProxy.class);
@@ -74,7 +77,7 @@ public class LazyLoadingProxyAotProcessor {
} else {
Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
LazyLoadingInterceptor::none);
() -> LazyLoadingInterceptor.none());
// see: spring-projects/spring-framework/issues/29309
generationContext.getRuntimeHints().reflection().registerType(proxyClass,

View File

@@ -206,7 +206,7 @@ public abstract class MongoConfigurationSupport {
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
*
* @return {@literal false} by default. <br />
* <strong>INFO:</strong> As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
* @since 2.2
*/
protected boolean autoIndexCreation() {

View File

@@ -19,7 +19,6 @@ import java.util.List;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.util.Pair;
import com.mongodb.bulk.BulkWriteResult;
@@ -29,15 +28,6 @@ import com.mongodb.bulk.BulkWriteResult;
* make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
* operations or list of similar operations in sequence which can then eventually be executed by calling
* {@link #execute()}.
*
* <pre class="code">
* MongoOperations ops = …;
*
* ops.bulkOps(BulkMode.UNORDERED, Person.class)
* .insert(newPerson)
* .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
* .execute();
* </pre>
* <p>
* Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
* that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
@@ -85,19 +75,7 @@ public interface BulkOperations {
* @param update {@link Update} operation to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations updateOne(Query query, Update update) {
return updateOne(query, (UpdateDefinition) update);
}
/**
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
*
* @param query update criteria, must not be {@literal null}.
* @param update {@link Update} operation to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations updateOne(Query query, UpdateDefinition update);
BulkOperations updateOne(Query query, Update update);
/**
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
@@ -105,7 +83,7 @@ public interface BulkOperations {
* @param updates Update operations to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates);
BulkOperations updateOne(List<Pair<Query, Update>> updates);
/**
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
@@ -114,19 +92,7 @@ public interface BulkOperations {
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations updateMulti(Query query, Update update) {
return updateMulti(query, (UpdateDefinition) update);
}
/**
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations updateMulti(Query query, UpdateDefinition update);
BulkOperations updateMulti(Query query, Update update);
/**
* Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
@@ -134,7 +100,7 @@ public interface BulkOperations {
* @param updates Update operations to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates);
BulkOperations updateMulti(List<Pair<Query, Update>> updates);
/**
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -144,20 +110,7 @@ public interface BulkOperations {
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations upsert(Query query, Update update) {
return upsert(query, (UpdateDefinition) update);
}
/**
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
* else an insert.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations upsert(Query query, UpdateDefinition update);
BulkOperations upsert(Query query, Update update);
/**
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -189,7 +142,7 @@ public interface BulkOperations {
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
default BulkOperations replaceOne(Query query, Object replacement) {
@@ -202,7 +155,7 @@ public interface BulkOperations {
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);

View File

@@ -1,221 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEvent;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.util.Assert;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
/**
 * Support class for bulk operations. Holds the name of the target collection and provides the
 * shared query/update mapping and lifecycle-event publishing used by concrete bulk operation
 * implementations, which supply the actual mappers and event publication via the abstract methods.
 *
 * @author Mark Paluch
 * @since 4.1
 */
abstract class BulkOperationsSupport {

	// Target collection name; validated and fixed at construction time.
	private final String collectionName;

	/**
	 * @param collectionName name of the collection the bulk operations target; must not be
	 *          {@literal null} nor empty.
	 */
	BulkOperationsSupport(String collectionName) {

		Assert.hasText(collectionName, "CollectionName must not be null nor empty");

		this.collectionName = collectionName;
	}

	/**
	 * Emit a {@link BeforeSaveEvent} for models that carry a target document, i.e. inserts and
	 * replacements. Other write models (updates, deletes) do not trigger an event here.
	 *
	 * @param holder the source object along with its {@link WriteModel} representation.
	 */
	void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

		if (holder.model() instanceof InsertOneModel) {
			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
		} else if (holder.model() instanceof ReplaceOneModel) {
			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
		}
	}

	/**
	 * Emit an {@link AfterSaveEvent} for models that carry a target document, i.e. inserts and
	 * replacements. Mirrors {@link #maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder)}.
	 *
	 * @param holder the source object along with its {@link WriteModel} representation.
	 */
	void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

		if (holder.model() instanceof InsertOneModel) {
			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
			maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
		} else if (holder.model() instanceof ReplaceOneModel) {
			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
			maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
		}
	}

	/**
	 * Run the filter (and, for updates, the update document) of the given {@link WriteModel} through
	 * the query/update mappers. {@link AggregationUpdate} sources are rendered as an update pipeline
	 * instead of a plain update document. Models other than update/delete are returned unchanged.
	 *
	 * @param source the original user-provided update object backing {@code writeModel}.
	 * @param writeModel the driver write model to map.
	 * @return a write model with mapped filter/update, or {@code writeModel} itself if no mapping applies.
	 */
	WriteModel<Document> mapWriteModel(Object source, WriteModel<Document> writeModel) {

		if (writeModel instanceof UpdateOneModel<Document> model) {

			if (source instanceof AggregationUpdate aggregationUpdate) {

				// aggregation updates are expressed as a pipeline, not a single update document
				List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
				return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
			}

			return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
					model.getOptions());
		}

		if (writeModel instanceof UpdateManyModel<Document> model) {

			if (source instanceof AggregationUpdate aggregationUpdate) {

				List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
				return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
			}

			return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
					model.getOptions());
		}

		if (writeModel instanceof DeleteOneModel<Document> model) {
			return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
		}

		if (writeModel instanceof DeleteManyModel<Document> model) {
			return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
		}

		// e.g. InsertOneModel / ReplaceOneModel: nothing to map here
		return writeModel;
	}

	/**
	 * Render the given {@link AggregationUpdate} into a mapped pipeline of {@link Document}s, using
	 * the associated entity type for field mapping if one is present ({@code Object.class} otherwise).
	 */
	private List<Document> mapUpdatePipeline(AggregationUpdate source) {

		Class<?> type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class;
		AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type,
				updateMapper().getMappingContext(), queryMapper());

		return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context);
	}

	/**
	 * Emit a {@link ApplicationEvent} if event multicasting is enabled.
	 *
	 * @param event the event to publish.
	 */
	protected abstract void maybeEmitEvent(ApplicationEvent event);

	/**
	 * @return the {@link UpdateMapper} to use.
	 */
	protected abstract UpdateMapper updateMapper();

	/**
	 * @return the {@link QueryMapper} to use.
	 */
	protected abstract QueryMapper queryMapper();

	/**
	 * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}.
	 */
	protected abstract Optional<? extends MongoPersistentEntity<?>> entity();

	/**
	 * Map the given update document against the associated entity (if any).
	 */
	protected Bson getMappedUpdate(Bson update) {
		return updateMapper().getMappedObject(update, entity());
	}

	/**
	 * Map the given query/filter document against the associated entity (if any).
	 */
	protected Bson getMappedQuery(Bson query) {
		return queryMapper().getMappedObject(query, entity());
	}

	/**
	 * Translate the {@link BulkMode} into driver {@link BulkWriteOptions} (ordered vs. unordered).
	 */
	protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

		BulkWriteOptions options = new BulkWriteOptions();

		return switch (bulkMode) {
			case ORDERED -> options.ordered(true);
			case UNORDERED -> options.ordered(false);
		};
	}

	/**
	 * Compute driver {@link UpdateOptions} from the given query and update, carrying over the upsert
	 * flag, any array filters declared on the update, and a collation read from the query.
	 *
	 * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
	 * @param update The {@link Update} to apply. Must not be {@literal null}.
	 * @param upsert flag to indicate if the document should be upserted.
	 * @return new instance of {@link UpdateOptions}.
	 */
	protected static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {

		UpdateOptions options = new UpdateOptions();
		options.upsert(upsert);

		if (update.hasArrayFilters()) {

			List<Document> list = new ArrayList<>(update.getArrayFilters().size());
			for (ArrayFilter arrayFilter : update.getArrayFilters()) {
				list.add(arrayFilter.asDocument());
			}
			options.arrayFilters(list);
		}

		filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);

		return options;
	}

	/**
	 * Value object chaining together an actual source with its {@link WriteModel} representation.
	 *
	 * @author Christoph Strobl
	 */
	record SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
	}
}

View File

@@ -150,12 +150,12 @@ public class ChangeStreamOptions {
return timestamp;
}
if (timestamp instanceof Instant instant) {
return new BsonTimestamp((int) instant.getEpochSecond(), 0);
if (timestamp instanceof Instant) {
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
}
if (timestamp instanceof BsonTimestamp bsonTimestamp) {
return Instant.ofEpochSecond(bsonTimestamp.getTime());
if (timestamp instanceof BsonTimestamp) {
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
}
throw new IllegalArgumentException(

View File

@@ -114,7 +114,7 @@ public class CollectionOptions {
/**
* Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}. <br />
* <strong>NOTE:</strong> Using capped collections requires defining {@link #size(long)}.
* <strong>NOTE</strong> Using capped collections requires defining {@link #size(long)}.
*
* @return new {@link CollectionOptions}.
* @since 2.0

View File

@@ -1,61 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.util.Assert;
import com.mongodb.client.MongoCollection;
/**
 * Functional contract for preparing a {@link MongoCollection} before it is used, e.g. to apply
 * read-related settings. Preparers can be composed via {@link #andThen(CollectionPreparer)}.
 *
 * @author Mark Paluch
 * @since 4.1
 */
public interface CollectionPreparer<T> {

	/**
	 * Obtain a no-op preparer that hands every collection back unchanged.
	 *
	 * @return a preparer that always returns its input collection.
	 */
	static <T> CollectionPreparer<T> identity() {
		return collection -> collection;
	}

	/**
	 * Prepare the {@code collection}.
	 *
	 * @param collection the collection to prepare.
	 * @return the prepared collection.
	 */
	T prepare(T collection);

	/**
	 * Compose this preparer with {@code after}: the returned preparer first applies this one, then
	 * hands the result to {@code after}. Exceptions thrown by either preparer propagate to the
	 * caller of the composed preparer.
	 *
	 * @param after the collection preparer to apply after this one; must not be {@literal null}.
	 * @return a composed {@code CollectionPreparer} applying this preparer, then {@code after}.
	 */
	default CollectionPreparer<T> andThen(CollectionPreparer<T> after) {

		Assert.notNull(after, "After CollectionPreparer must not be null");

		return input -> after.prepare(prepare(input));
	}
}

View File

@@ -1,182 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.bson.Document;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.client.MongoCollection;
/**
* Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon
* {@link CollectionPreparer preparing a collection}.
*
* @author Mark Paluch
* @since 4.1
*/
class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware {
private final List<Object> sources;
private CollectionPreparerSupport(List<Object> sources) {
this.sources = sources;
}
<T> T doPrepare(T collection, Function<T, ReadConcern> concernAccessor, BiFunction<T, ReadConcern, T> concernFunction,
Function<T, ReadPreference> preferenceAccessor, BiFunction<T, ReadPreference, T> preferenceFunction) {
T collectionToUse = collection;
for (Object source : sources) {
if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) {
ReadConcern concern = rca.getReadConcern();
if (concernAccessor.apply(collectionToUse) != concern) {
collectionToUse = concernFunction.apply(collectionToUse, concern);
}
break;
}
}
for (Object source : sources) {
if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
ReadPreference preference = rpa.getReadPreference();
if (preferenceAccessor.apply(collectionToUse) != preference) {
collectionToUse = preferenceFunction.apply(collectionToUse, preference);
}
break;
}
}
return collectionToUse;
}
@Override
public boolean hasReadConcern() {
for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return true;
}
}
return false;
}
@Override
public ReadConcern getReadConcern() {
for (Object aware : sources) {
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
return rca.getReadConcern();
}
}
return null;
}
@Override
public boolean hasReadPreference() {
for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return true;
}
}
return false;
}
@Override
public ReadPreference getReadPreference() {
for (Object aware : sources) {
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
return rpa.getReadPreference();
}
}
return null;
}
static class CollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<MongoCollection<Document>> {
private CollectionPreparerDelegate(List<Object> sources) {
super(sources);
}
public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}
public static CollectionPreparerDelegate of(Object... mixedAwares) {
if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (CollectionPreparerDelegate) mixedAwares[0];
}
return new CollectionPreparerDelegate(Arrays.asList(mixedAwares));
}
@Override
public MongoCollection<Document> prepare(MongoCollection<Document> collection) {
return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern,
MongoCollection::getReadPreference, MongoCollection::withReadPreference);
}
}
static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport
implements CollectionPreparer<com.mongodb.reactivestreams.client.MongoCollection<Document>> {
private ReactiveCollectionPreparerDelegate(List<Object> sources) {
super(sources);
}
public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) {
return of((Object[]) awares);
}
public static ReactiveCollectionPreparerDelegate of(Object... mixedAwares) {
if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
return (ReactiveCollectionPreparerDelegate) mixedAwares[0];
}
return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares));
}
@Override
public com.mongodb.reactivestreams.client.MongoCollection<Document> prepare(
com.mongodb.reactivestreams.client.MongoCollection<Document> collection) {
return doPrepare(collection, //
com.mongodb.reactivestreams.client.MongoCollection::getReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::withReadConcern,
com.mongodb.reactivestreams.client.MongoCollection::getReadPreference,
com.mongodb.reactivestreams.client.MongoCollection::withReadPreference);
}
}
}

View File

@@ -64,15 +64,18 @@ class CountQuery {
for (Map.Entry<String, Object> entry : source.entrySet()) {
if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) {
if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and")));
Document theValue = (Document) entry.getValue();
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
continue;
}
if (entry.getValue() instanceof Collection<?> collection && requiresRewrite(entry.getValue())) {
if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
target.put(entry.getKey(), rewriteCollection(collection));
Collection<?> source = (Collection<?>) entry.getValue();
target.put(entry.getKey(), rewriteCollection(source));
continue;
}
@@ -93,12 +96,12 @@ class CountQuery {
*/
private boolean requiresRewrite(Object valueToInspect) {
if (valueToInspect instanceof Document document) {
return requiresRewrite(document);
if (valueToInspect instanceof Document) {
return requiresRewrite((Document) valueToInspect);
}
if (valueToInspect instanceof Collection<?> collection) {
return requiresRewrite(collection);
if (valueToInspect instanceof Collection) {
return requiresRewrite((Collection<?>) valueToInspect);
}
return false;
@@ -107,7 +110,7 @@ class CountQuery {
private boolean requiresRewrite(Collection<?> collection) {
for (Object o : collection) {
if (o instanceof Document document && requiresRewrite(document)) {
if (o instanceof Document && requiresRewrite((Document) o)) {
return true;
}
}
@@ -136,8 +139,8 @@ class CountQuery {
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
for (Object item : source) {
if (item instanceof Document document && requiresRewrite(item)) {
rewrittenCollection.add(CountQuery.of(document).toQueryDocument());
if (item instanceof Document && requiresRewrite(item)) {
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
} else {
rewrittenCollection.add(item);
}
@@ -239,8 +242,8 @@ class CountQuery {
return value;
}
if (value instanceof Point point) {
return Arrays.asList(point.getX(), point.getY());
if (value instanceof Point) {
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
}
if (value instanceof Document document) {

View File

@@ -16,47 +16,42 @@
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.bson.Document;
import org.springframework.context.ApplicationEvent;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.util.Pair;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import com.mongodb.MongoBulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.model.*;
/**
* Default implementation for {@link BulkOperations}.
@@ -72,7 +67,7 @@ import com.mongodb.client.model.WriteModel;
* @author Jacob Botuck
* @since 1.9
*/
class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations {
class DefaultBulkOperations implements BulkOperations {
private final MongoOperations mongoOperations;
private final String collectionName;
@@ -80,6 +75,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
private @Nullable WriteConcern defaultWriteConcern;
private BulkWriteOptions bulkOptions;
/**
@@ -94,7 +90,6 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
DefaultBulkOperations(MongoOperations mongoOperations, String collectionName,
BulkOperationContext bulkOperationContext) {
super(collectionName);
Assert.notNull(mongoOperations, "MongoOperations must not be null");
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
@@ -102,7 +97,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
this.mongoOperations = mongoOperations;
this.collectionName = collectionName;
this.bulkOperationContext = bulkOperationContext;
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
}
/**
@@ -137,20 +132,21 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}
@Override
public BulkOperations updateOne(Query query, UpdateDefinition update) {
@SuppressWarnings("unchecked")
public BulkOperations updateOne(Query query, Update update) {
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");
return update(query, update, false, false);
return updateOne(Collections.singletonList(Pair.of(query, update)));
}
@Override
public BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates) {
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
Assert.notNull(updates, "Updates must not be null");
for (Pair<Query, UpdateDefinition> update : updates) {
for (Pair<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, false);
}
@@ -158,22 +154,21 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}
@Override
public BulkOperations updateMulti(Query query, UpdateDefinition update) {
@SuppressWarnings("unchecked")
public BulkOperations updateMulti(Query query, Update update) {
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");
update(query, update, false, true);
return this;
return updateMulti(Collections.singletonList(Pair.of(query, update)));
}
@Override
public BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates) {
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
Assert.notNull(updates, "Updates must not be null");
for (Pair<Query, UpdateDefinition> update : updates) {
for (Pair<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, true);
}
@@ -181,7 +176,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}
@Override
public BulkOperations upsert(Query query, UpdateDefinition update) {
public BulkOperations upsert(Query query, Update update) {
return update(query, update, true, true);
}
@@ -253,7 +248,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
return result;
} finally {
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
}
}
@@ -272,8 +267,9 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
bulkOptions);
} catch (RuntimeException ex) {
if (ex instanceof MongoBulkWriteException mongoBulkWriteException) {
if (ex instanceof MongoBulkWriteException) {
MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
if (mongoBulkWriteException.getWriteConcernError() != null) {
throw new DataIntegrityViolationException(ex.getMessage(), ex);
}
@@ -288,17 +284,17 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
maybeEmitBeforeSaveEvent(it);
if (it.model() instanceof InsertOneModel<Document> model) {
if (it.getModel() instanceof InsertOneModel) {
Document target = model.getDocument();
maybeInvokeBeforeSaveCallback(it.source(), target);
} else if (it.model() instanceof ReplaceOneModel<Document> model) {
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
} else if (it.getModel() instanceof ReplaceOneModel) {
Document target = model.getReplacement();
maybeInvokeBeforeSaveCallback(it.source(), target);
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
}
return mapWriteModel(it.source(), it.model());
return mapWriteModel(it.getModel());
}
/**
@@ -310,7 +306,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
* @param multi whether to issue a multi-update.
* @return the {@link BulkOperations} with the update registered.
*/
private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");
@@ -326,30 +322,53 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
return this;
}
@Override
protected void maybeEmitEvent(ApplicationEvent event) {
bulkOperationContext.publishEvent(event);
private WriteModel<Document> mapWriteModel(WriteModel<Document> writeModel) {
if (writeModel instanceof UpdateOneModel) {
UpdateOneModel<Document> model = (UpdateOneModel<Document>) writeModel;
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}
if (writeModel instanceof UpdateManyModel) {
UpdateManyModel<Document> model = (UpdateManyModel<Document>) writeModel;
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}
if (writeModel instanceof DeleteOneModel) {
DeleteOneModel<Document> model = (DeleteOneModel<Document>) writeModel;
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}
if (writeModel instanceof DeleteManyModel) {
DeleteManyModel<Document> model = (DeleteManyModel<Document>) writeModel;
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}
return writeModel;
}
@Override
protected UpdateMapper updateMapper() {
return bulkOperationContext.updateMapper();
private Bson getMappedUpdate(Bson update) {
return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity());
}
@Override
protected QueryMapper queryMapper() {
return bulkOperationContext.queryMapper();
}
@Override
protected Optional<? extends MongoPersistentEntity<?>> entity() {
return bulkOperationContext.entity();
private Bson getMappedQuery(Bson query) {
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
}
private Document getMappedObject(Object source) {
if (source instanceof Document document) {
return document;
if (source instanceof Document) {
return (Document) source;
}
Document sink = new Document();
@@ -362,83 +381,268 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
models.add(new SourceAwareWriteModelHolder(source, model));
}
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
if (holder.model() instanceof InsertOneModel<Document> model) {
if (holder.getModel() instanceof InsertOneModel) {
Document target = model.getDocument();
maybeInvokeAfterSaveCallback(holder.source(), target);
} else if (holder.model() instanceof ReplaceOneModel<Document> model) {
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {
Document target = model.getReplacement();
maybeInvokeAfterSaveCallback(holder.source(), target);
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
}
}
private void publishEvent(MongoMappingEvent<?> event) {
bulkOperationContext.publishEvent(event);
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
if (holder.getModel() instanceof InsertOneModel) {
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
}
}
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
if (holder.getModel() instanceof InsertOneModel) {
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
} else if (holder.getModel() instanceof ReplaceOneModel) {
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
}
}
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
if (bulkOperationContext.getEventPublisher() == null) {
return event;
}
bulkOperationContext.getEventPublisher().publishEvent(event);
return event;
}
private Object maybeInvokeBeforeConvertCallback(Object value) {
return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName);
if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
}
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName);
if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
collectionName);
}
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName);
if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
collectionName);
}
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
BulkWriteOptions options = new BulkWriteOptions();
switch (bulkMode) {
case ORDERED:
return options.ordered(true);
case UNORDERED:
return options.ordered(false);
}
throw new IllegalStateException("BulkMode was null");
}
/**
* {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
* @param update The {@link Update} to apply
* @param upsert flag to indicate if document should be upserted.
* @return new instance of {@link UpdateOptions}.
*/
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
UpdateOptions options = new UpdateOptions();
options.upsert(upsert);
if (update.hasArrayFilters()) {
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
list.add(arrayFilter.asDocument());
}
options.arrayFilters(list);
}
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
return options;
}
/**
* {@link BulkOperationContext} holds information about
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
* {@link QueryMapper} and {@link UpdateMapper}.
*
* @author Christoph Strobl
* @since 2.0
*/
record BulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
@Nullable EntityCallbacks entityCallbacks) {
static final class BulkOperationContext {
public boolean skipEntityCallbacks() {
return entityCallbacks == null;
private final BulkMode bulkMode;
private final Optional<? extends MongoPersistentEntity<?>> entity;
private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;
private final ApplicationEventPublisher eventPublisher;
private final EntityCallbacks entityCallbacks;
BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
EntityCallbacks entityCallbacks) {
this.bulkMode = bulkMode;
this.entity = entity;
this.queryMapper = queryMapper;
this.updateMapper = updateMapper;
this.eventPublisher = eventPublisher;
this.entityCallbacks = entityCallbacks;
}
public boolean skipEventPublishing() {
return eventPublisher == null;
public BulkMode getBulkMode() {
return this.bulkMode;
}
@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
return this.entity;
}
if (skipEntityCallbacks()) {
return entity;
public QueryMapper getQueryMapper() {
return this.queryMapper;
}
public UpdateMapper getUpdateMapper() {
return this.updateMapper;
}
public ApplicationEventPublisher getEventPublisher() {
return this.eventPublisher;
}
public EntityCallbacks getEntityCallbacks() {
return this.entityCallbacks;
}
@Override
public boolean equals(@Nullable Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
BulkOperationContext that = (BulkOperationContext) o;
if (bulkMode != that.bulkMode)
return false;
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
return false;
}
return entityCallbacks.callback(callbackType, entity, collectionName);
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
}
@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
String collectionName) {
if (skipEntityCallbacks()) {
return entity;
}
return entityCallbacks.callback(callbackType, entity, document, collectionName);
@Override
public int hashCode() {
int result = bulkMode != null ? bulkMode.hashCode() : 0;
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
return result;
}
public void publishEvent(ApplicationEvent event) {
if (skipEventPublishing()) {
return;
}
eventPublisher.publishEvent(event);
public String toString() {
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
}
}
/**
* Value object chaining together an actual source with its {@link WriteModel} representation.
*
* @since 2.2
* @author Christoph Strobl
*/
private static final class SourceAwareWriteModelHolder {
private final Object source;
private final WriteModel<Document> model;
SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
this.source = source;
this.model = model;
}
public Object getSource() {
return this.source;
}
public WriteModel<Document> getModel() {
return this.model;
}
@Override
public boolean equals(@Nullable Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;
if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.model, that.model);
}
@Override
public int hashCode() {
int result = ObjectUtils.nullSafeHashCode(model);
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
return result;
}
public String toString() {
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
+ this.getModel() + ")";
}
}
}

View File

@@ -22,7 +22,6 @@ import java.util.List;
import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;
import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
@@ -157,20 +155,6 @@ public class DefaultIndexOperations implements IndexOperations {
}
@Override
public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
Document indexOptions = new Document("name", name);
indexOptions.putAll(options.toDocument());
Document result = mongoOperations
.execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)));
if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
}
}
public void dropAllIndexes() {
dropIndex("*");
}

View File

@@ -1,390 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.bson.Document;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.reactivestreams.client.MongoCollection;
/**
 * Default implementation for {@link ReactiveBulkOperations}.
 * <p>
 * Operations are not sent to the server when registered. Instead, each {@code insert}/{@code update}/{@code remove}/
 * {@code replaceOne} call adds a deferred {@link Mono} to an internal list; mapping, lifecycle events and reactive
 * entity callbacks are applied lazily when {@link #execute()} is subscribed to.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 4.1
 */
class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations {

	private final ReactiveMongoOperations mongoOperations;
	private final String collectionName;
	private final ReactiveBulkOperationContext bulkOperationContext;

	// Deferred write models: each entry is a Mono so that (potentially asynchronous) entity callbacks
	// and document mapping run at subscription time, not at registration time.
	private final List<Mono<SourceAwareWriteModelHolder>> models = new ArrayList<>();

	private @Nullable WriteConcern defaultWriteConcern;

	// Derived from the BulkMode (ordered/unordered); re-initialized after each execute().
	private BulkWriteOptions bulkOptions;

	/**
	 * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and
	 * {@link ReactiveBulkOperationContext}.
	 *
	 * @param mongoOperations must not be {@literal null}.
	 * @param collectionName must not be {@literal null}.
	 * @param bulkOperationContext must not be {@literal null}.
	 */
	DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName,
			ReactiveBulkOperationContext bulkOperationContext) {

		super(collectionName);

		Assert.notNull(mongoOperations, "MongoOperations must not be null");
		Assert.hasText(collectionName, "CollectionName must not be null nor empty");
		Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");

		this.mongoOperations = mongoOperations;
		this.collectionName = collectionName;
		this.bulkOperationContext = bulkOperationContext;
		this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
	}

	/**
	 * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
	 *
	 * @param defaultWriteConcern can be {@literal null}.
	 */
	void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) {
		this.defaultWriteConcern = defaultWriteConcern;
	}

	/**
	 * Registers an insert. {@link BeforeConvertEvent} and {@link ReactiveBeforeConvertCallback} are applied lazily,
	 * once the registered model's {@link Mono} is subscribed during {@link #execute()}.
	 *
	 * @param document must not be {@literal null}.
	 */
	@Override
	public ReactiveBulkOperations insert(Object document) {

		Assert.notNull(document, "Document must not be null");

		this.models.add(Mono.just(document).flatMap(it -> {
			maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
			return maybeInvokeBeforeConvertCallback(it);
		}).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it)))));

		return this;
	}

	/**
	 * Registers one insert per element by delegating to {@link #insert(Object)}.
	 *
	 * @param documents must not be {@literal null}.
	 */
	@Override
	public ReactiveBulkOperations insert(List<? extends Object> documents) {

		Assert.notNull(documents, "Documents must not be null");

		documents.forEach(this::insert);
		return this;
	}

	@Override
	public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) {

		Assert.notNull(query, "Query must not be null");
		Assert.notNull(update, "Update must not be null");

		// upsert=false, multi=false -> UpdateOneModel
		update(query, update, false, false);
		return this;
	}

	@Override
	public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) {

		Assert.notNull(query, "Query must not be null");
		Assert.notNull(update, "Update must not be null");

		// upsert=false, multi=true -> UpdateManyModel
		update(query, update, false, true);
		return this;
	}

	@Override
	public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) {
		// upsert=true, multi=true; null checks happen in update(..).
		return update(query, update, true, true);
	}

	@Override
	public ReactiveBulkOperations remove(Query query) {

		Assert.notNull(query, "Query must not be null");

		DeleteOptions deleteOptions = new DeleteOptions();
		// Propagate a query-level collation, if present, to the delete.
		query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);

		// NOTE(review): uses DeleteManyModel, i.e. remove(Query) deletes all matching documents.
		this.models.add(Mono.just(query)
				.map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions))));

		return this;
	}

	@Override
	public ReactiveBulkOperations remove(List<Query> removes) {

		Assert.notNull(removes, "Removals must not be null");

		for (Query query : removes) {
			remove(query);
		}

		return this;
	}

	/**
	 * Registers a replaceOne. Like inserts, the replacement runs through {@link BeforeConvertEvent} and
	 * {@link ReactiveBeforeConvertCallback} lazily on subscription.
	 *
	 * @param query must not be {@literal null}.
	 * @param replacement must not be {@literal null}.
	 * @param options must not be {@literal null}; only the upsert flag and query collation are forwarded here.
	 */
	@Override
	public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {

		Assert.notNull(query, "Query must not be null");
		Assert.notNull(replacement, "Replacement must not be null");
		Assert.notNull(options, "Options must not be null");

		ReplaceOptions replaceOptions = new ReplaceOptions();
		replaceOptions.upsert(options.isUpsert());
		query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);

		this.models.add(Mono.just(replacement).flatMap(it -> {
			maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
			return maybeInvokeBeforeConvertCallback(it);
		}).map(it -> new SourceAwareWriteModelHolder(it,
				// Query is mapped eagerly here; the mapped replacement document comes from the callback result.
				new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(it), replaceOptions))));

		return this;
	}

	@Override
	public Mono<BulkWriteResult> execute() {

		try {
			return mongoOperations.execute(collectionName, this::bulkWriteTo).next();
		} finally {
			// NOTE(review): this reset runs at assembly time (before subscription). It is harmless because
			// getBulkWriteOptions(..) is derived solely from the immutable bulkMode, but the registered models
			// list is NOT cleared here — re-subscribing/re-executing replays all registered operations. Confirm
			// whether that matches the intended single-use contract of BulkOperations.
			this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
		}
	}

	/**
	 * Materializes the registered models in registration order ({@link Flux#concat(Iterable)} preserves order),
	 * applies before-save events/callbacks for inserts and replacements, issues a single
	 * {@link MongoCollection#bulkWrite(List, BulkWriteOptions)} and finally emits after-save events/callbacks.
	 */
	private Mono<BulkWriteResult> bulkWriteTo(MongoCollection<Document> collection) {

		if (defaultWriteConcern != null) {
			collection = collection.withWriteConcern(defaultWriteConcern);
		}

		Flux<SourceAwareWriteModelHolder> concat = Flux.concat(models).flatMap(it -> {

			if (it.model()instanceof InsertOneModel<Document> iom) {

				Document target = iom.getDocument();
				maybeEmitBeforeSaveEvent(it);
				// Callback may mutate/replace the source; re-wrap with a freshly mapped model afterwards.
				return maybeInvokeBeforeSaveCallback(it.source(), target)
						.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom)));
			} else if (it.model()instanceof ReplaceOneModel<Document> rom) {

				Document target = rom.getReplacement();
				maybeEmitBeforeSaveEvent(it);
				return maybeInvokeBeforeSaveCallback(it.source(), target)
						.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom)));
			}

			// Updates/deletes: no save callbacks, only field/query mapping via the superclass.
			return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model())));
		});

		// Effectively-final copy for use inside the lambda below.
		MongoCollection<Document> theCollection = collection;
		return concat.collectList().flatMap(it -> {

			return Mono
					.from(theCollection
							.bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions))
					.doOnSuccess(state -> {
						// Events are fire-and-forget ...
						it.forEach(this::maybeEmitAfterSaveEvent);
					}).flatMap(state -> {
						// ... while callbacks are chained so completion waits for all of them before emitting the result.
						List<Mono<Object>> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList());
						return Flux.concat(monos).then(Mono.just(state));
					});
		});
	}

	/**
	 * Performs update and upsert bulk operations.
	 *
	 * @param query the {@link Query} to determine documents to update.
	 * @param update the {@link Update} to perform, must not be {@literal null}.
	 * @param upsert whether to upsert.
	 * @param multi whether to issue a multi-update.
	 * @return the {@link BulkOperations} with the update registered.
	 */
	private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {

		Assert.notNull(query, "Query must not be null");
		Assert.notNull(update, "Update must not be null");

		// Resolves upsert/collation/arrayFilters etc. from query + update (see BulkOperationsSupport).
		UpdateOptions options = computeUpdateOptions(query, update, upsert);

		this.models.add(Mono.just(update).map(it -> {

			if (multi) {
				return new SourceAwareWriteModelHolder(update,
						new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options));
			}
			return new SourceAwareWriteModelHolder(update,
					new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options));
		}));

		return this;
	}

	@Override
	protected void maybeEmitEvent(ApplicationEvent event) {
		bulkOperationContext.publishEvent(event);
	}

	@Override
	protected UpdateMapper updateMapper() {
		return bulkOperationContext.updateMapper();
	}

	@Override
	protected QueryMapper queryMapper() {
		return bulkOperationContext.queryMapper();
	}

	@Override
	protected Optional<? extends MongoPersistentEntity<?>> entity() {
		return bulkOperationContext.entity();
	}

	/**
	 * Converts the given source into a {@link Document} using the configured converter; a source that already is a
	 * {@link Document} is returned as-is (not copied).
	 */
	private Document getMappedObject(Object source) {

		if (source instanceof Document) {
			return (Document) source;
		}

		Document sink = new Document();
		mongoOperations.getConverter().write(source, sink);
		return sink;
	}

	/**
	 * Invokes the after-save callback for insert/replace models; other model types complete with the unchanged source.
	 */
	private Mono<Object> maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {

		if (holder.model() instanceof InsertOneModel) {

			// Unchecked cast: models registered by this class always carry Document payloads.
			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
			return maybeInvokeAfterSaveCallback(holder.source(), target);
		} else if (holder.model() instanceof ReplaceOneModel) {

			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
			return maybeInvokeAfterSaveCallback(holder.source(), target);
		}
		return Mono.just(holder.source());
	}

	private Mono<Object> maybeInvokeBeforeConvertCallback(Object value) {
		return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName);
	}

	private Mono<Object> maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
		return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName);
	}

	private Mono<Object> maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
		return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName);
	}

	/**
	 * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as
	 * references to {@link QueryMapper} and {@link UpdateMapper}.
	 * <p>
	 * Event publisher and entity callbacks are both optional; when absent the corresponding hooks degrade to no-ops.
	 *
	 * @author Christoph Strobl
	 * @since 2.0
	 */
	record ReactiveBulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
			QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
			@Nullable ReactiveEntityCallbacks entityCallbacks) {

		public boolean skipEntityCallbacks() {
			return entityCallbacks == null;
		}

		public boolean skipEventPublishing() {
			return eventPublisher == null;
		}

		/**
		 * Invokes the given callback type for {@code entity}, or completes with the unchanged entity when no
		 * {@link ReactiveEntityCallbacks} are configured.
		 */
		@SuppressWarnings("rawtypes")
		public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {

			if (skipEntityCallbacks()) {
				return Mono.just(entity);
			}

			return entityCallbacks.callback(callbackType, entity, collectionName);
		}

		/**
		 * Variant of {@link #callback(Class, Object, String)} that additionally passes the mapped {@link Document}.
		 */
		@SuppressWarnings("rawtypes")
		public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
				String collectionName) {

			if (skipEntityCallbacks()) {
				return Mono.just(entity);
			}

			return entityCallbacks.callback(callbackType, entity, document, collectionName);
		}

		/**
		 * Publishes the given event if an {@link ApplicationEventPublisher} is configured; no-op otherwise.
		 */
		public void publishEvent(ApplicationEvent event) {

			if (skipEventPublishing()) {
				return;
			}

			eventPublisher.publishEvent(event);
		}
	}
}

View File

@@ -22,7 +22,6 @@ import java.util.Collection;
import java.util.Optional;
import org.bson.Document;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;
import com.mongodb.client.model.IndexOptions;
@@ -106,22 +104,6 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
}).next();
}
@Override
public Mono<Void> alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
return mongoOperations.execute(db -> {
Document indexOptions = new Document("name", name);
indexOptions.putAll(options.toDocument());
return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)))
.doOnNext(result -> {
if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
}
});
}).then();
}
@Nullable
private MongoPersistentEntity<?> lookupPersistentEntity(String collection) {

View File

@@ -150,7 +150,7 @@ class DefaultScriptOperations implements ScriptOperations {
return args;
}
List<Object> convertedValues = new ArrayList<>(args.length);
List<Object> convertedValues = new ArrayList<Object>(args.length);
for (Object arg : args) {
convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg)

View File

@@ -17,11 +17,9 @@ package org.springframework.data.mongodb.core;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import org.bson.BsonNull;
import org.bson.Document;
import org.springframework.core.convert.ConversionService;
import org.springframework.dao.InvalidDataAccessApiUsageException;
@@ -30,8 +28,6 @@ import org.springframework.data.mapping.IdentifierAccessor;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentPropertyAccessor;
import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
@@ -48,11 +44,9 @@ import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.timeseries.Granularity;
import org.springframework.data.mongodb.core.validation.Validator;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.projection.EntityProjection;
import org.springframework.data.projection.EntityProjectionIntrospector;
import org.springframework.data.projection.ProjectionFactory;
import org.springframework.data.projection.TargetAware;
import org.springframework.data.util.Optionals;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
@@ -120,19 +114,15 @@ class EntityOperations {
Assert.notNull(entity, "Bean must not be null");
if (entity instanceof TargetAware targetAware) {
return new SimpleMappedEntity((Map<String, Object>) targetAware.getTarget(), this);
}
if (entity instanceof String) {
return new UnmappedEntity(parse(entity.toString()), this);
return new UnmappedEntity(parse(entity.toString()));
}
if (entity instanceof Map) {
return new SimpleMappedEntity((Map<String, Object>) entity, this);
return new SimpleMappedEntity((Map<String, Object>) entity);
}
return MappedEntity.of(entity, context, this);
return MappedEntity.of(entity, context);
}
/**
@@ -149,14 +139,14 @@ class EntityOperations {
Assert.notNull(conversionService, "ConversionService must not be null");
if (entity instanceof String) {
return new UnmappedEntity(parse(entity.toString()), this);
return new UnmappedEntity(parse(entity.toString()));
}
if (entity instanceof Map) {
return new SimpleMappedEntity((Map<String, Object>) entity, this);
return new SimpleMappedEntity((Map<String, Object>) entity);
}
return AdaptibleMappedEntity.of(entity, context, conversionService, this);
return AdaptibleMappedEntity.of(entity, context, conversionService);
}
/**
@@ -293,11 +283,6 @@ class EntityOperations {
* @see EntityProjectionIntrospector#introspect(Class, Class)
*/
public <M, D> EntityProjection<M, D> introspectProjection(Class<M> resultType, Class<D> entityType) {
MongoPersistentEntity<?> persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType);
if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) {
return (EntityProjection) EntityProjection.nonProjecting(resultType);
}
return introspector.introspect(resultType, entityType);
}
@@ -377,7 +362,6 @@ class EntityOperations {
* A representation of information about an entity.
*
* @author Oliver Gierke
* @author Christoph Strobl
* @since 2.1
*/
interface Entity<T> {
@@ -396,16 +380,6 @@ class EntityOperations {
*/
Object getId();
/**
* Returns the property value for {@code key}.
*
* @param key
* @return
* @since 4.1
*/
@Nullable
Object getPropertyValue(String key);
/**
* Returns the {@link Query} to find the entity by its identifier.
*
@@ -476,15 +450,6 @@ class EntityOperations {
* @since 2.1.2
*/
boolean isNew();
/**
* @param sortObject
* @return
* @since 4.1
* @throws IllegalStateException if a sort key yields {@literal null}.
*/
Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType);
}
/**
@@ -506,7 +471,7 @@ class EntityOperations {
T populateIdIfNecessary(@Nullable Object id);
/**
* Initializes the version property of the current entity if available.
* Initializes the version property of the of the current entity if available.
*
* @return the entity with the version property updated if available.
*/
@@ -532,11 +497,9 @@ class EntityOperations {
private static class UnmappedEntity<T extends Map<String, Object>> implements AdaptibleEntity<T> {
private final T map;
private final EntityOperations entityOperations;
protected UnmappedEntity(T map, EntityOperations entityOperations) {
protected UnmappedEntity(T map) {
this.map = map;
this.entityOperations = entityOperations;
}
@Override
@@ -546,12 +509,7 @@ class EntityOperations {
@Override
public Object getId() {
return getPropertyValue(ID_FIELD);
}
@Override
public Object getPropertyValue(String key) {
return map.get(key);
return map.get(ID_FIELD);
}
@Override
@@ -575,8 +533,8 @@ class EntityOperations {
@Override
public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
return MappedDocument.of(map instanceof Document document //
? document //
return MappedDocument.of(map instanceof Document //
? (Document) map //
: new Document(map));
}
@@ -605,50 +563,12 @@ class EntityOperations {
public boolean isNew() {
return map.get(ID_FIELD) != null;
}
@Override
public Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType) {
Map<String, Object> keyset = new LinkedHashMap<>();
MongoPersistentEntity<?> sourceEntity = entityOperations.context.getPersistentEntity(sourceType);
if (sourceEntity != null && sourceEntity.hasIdProperty()) {
keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId());
} else {
keyset.put(ID_FIELD, getId());
}
for (String key : sortObject.keySet()) {
Object value = resolveValue(key, sourceEntity);
if (value == null) {
throw new IllegalStateException(
String.format("Cannot extract value for key %s because its value is null", key));
}
keyset.put(key, value);
}
return keyset;
}
@Nullable
private Object resolveValue(String key, @Nullable MongoPersistentEntity<?> sourceEntity) {
if (sourceEntity == null) {
return BsonUtils.resolveValue(map, key);
}
PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation());
PersistentPropertyPath<MongoPersistentProperty> persistentPropertyPath = entityOperations.context
.getPersistentPropertyPath(from);
return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(p -> p.getFieldName()));
}
}
private static class SimpleMappedEntity<T extends Map<String, Object>> extends UnmappedEntity<T> {
protected SimpleMappedEntity(T map, EntityOperations entityOperations) {
super(map, entityOperations);
protected SimpleMappedEntity(T map) {
super(map);
}
@Override
@@ -656,8 +576,8 @@ class EntityOperations {
public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
T bean = getBean();
bean = (T) (bean instanceof Document document//
? document //
bean = (T) (bean instanceof Document //
? (Document) bean //
: new Document(bean));
Document document = new Document();
writer.write(bean, document);
@@ -671,26 +591,23 @@ class EntityOperations {
private final MongoPersistentEntity<?> entity;
private final IdentifierAccessor idAccessor;
private final PersistentPropertyAccessor<T> propertyAccessor;
private final EntityOperations entityOperations;
protected MappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor idAccessor,
PersistentPropertyAccessor<T> propertyAccessor, EntityOperations entityOperations) {
PersistentPropertyAccessor<T> propertyAccessor) {
this.entity = entity;
this.idAccessor = idAccessor;
this.propertyAccessor = propertyAccessor;
this.entityOperations = entityOperations;
}
private static <T> MappedEntity<T> of(T bean,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
EntityOperations entityOperations) {
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);
return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations);
return new MappedEntity<>(entity, identifierAccessor, propertyAccessor);
}
@Override
@@ -703,11 +620,6 @@ class EntityOperations {
return idAccessor.getRequiredIdentifier();
}
@Override
public Object getPropertyValue(String key) {
return propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key));
}
@Override
public Query getByIdQuery() {
@@ -785,60 +697,6 @@ class EntityOperations {
public boolean isNew() {
return entity.isNew(propertyAccessor.getBean());
}
@Override
public Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType) {
Map<String, Object> keyset = new LinkedHashMap<>();
MongoPersistentEntity<?> sourceEntity = entityOperations.context.getPersistentEntity(sourceType);
if (sourceEntity != null && sourceEntity.hasIdProperty()) {
keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId());
} else {
keyset.put(entity.getRequiredIdProperty().getName(), getId());
}
for (String key : sortObject.keySet()) {
Object value;
if (key.indexOf('.') != -1) {
// follow the path across nested levels.
// TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document.
value = getNestedPropertyValue(key);
} else {
value = getPropertyValue(key);
}
if (value == null) {
throw new IllegalStateException(
String.format("Cannot extract value for key %s because its value is null", key));
}
keyset.put(key, value);
}
return keyset;
}
@Nullable
private Object getNestedPropertyValue(String key) {
String[] segments = key.split("\\.");
Entity<?> currentEntity = this;
Object currentValue = BsonNull.VALUE;
for (int i = 0; i < segments.length; i++) {
String segment = segments[i];
currentValue = currentEntity.getPropertyValue(segment);
if (i < segments.length - 1) {
currentEntity = entityOperations.forEntity(currentValue);
}
}
return currentValue != null ? currentValue : BsonNull.VALUE;
}
}
private static class AdaptibleMappedEntity<T> extends MappedEntity<T> implements AdaptibleEntity<T> {
@@ -848,9 +706,9 @@ class EntityOperations {
private final IdentifierAccessor identifierAccessor;
private AdaptibleMappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor identifierAccessor,
ConvertingPropertyAccessor<T> propertyAccessor, EntityOperations entityOperations) {
ConvertingPropertyAccessor<T> propertyAccessor) {
super(entity, identifierAccessor, propertyAccessor, entityOperations);
super(entity, identifierAccessor, propertyAccessor);
this.entity = entity;
this.propertyAccessor = propertyAccessor;
@@ -859,14 +717,14 @@ class EntityOperations {
private static <T> AdaptibleEntity<T> of(T bean,
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
ConversionService conversionService, EntityOperations entityOperations) {
ConversionService conversionService) {
MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);
return new AdaptibleMappedEntity<>(entity, identifierAccessor,
new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations);
new ConvertingPropertyAccessor<>(propertyAccessor, conversionService));
}
@Nullable
@@ -967,14 +825,6 @@ class EntityOperations {
* @since 3.3
*/
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
/**
* @return the name of the id field.
* @since 4.1
*/
default String getIdKeyName() {
return ID_FIELD;
}
}
/**
@@ -1097,11 +947,6 @@ class EntityOperations {
MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
return persistentProperty != null ? persistentProperty.getFieldName() : name;
}
@Override
public String getIdKeyName() {
return entity.getIdProperty().getName();
}
}
}

View File

@@ -98,7 +98,9 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
return collection;
}
if (aggregation instanceof TypedAggregation typedAggregation) {
if (aggregation instanceof TypedAggregation) {
TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
if (typedAggregation.getInputType() != null) {
return template.getCollectionName(typedAggregation.getInputType());

View File

@@ -20,9 +20,6 @@ import java.util.Optional;
import java.util.stream.Stream;
import org.springframework.dao.DataAccessException;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
@@ -127,28 +124,12 @@ public interface ExecutableFindOperation {
Stream<T> stream();
/**
* Return a window of elements either starting or resuming at
* {@link org.springframework.data.domain.ScrollPosition}.
* <p>
* When using {@link KeysetScrollPosition}, make sure to use non-nullable
* {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct
* a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators.
*
* @param scrollPosition the scroll position.
* @return a window of the resulting elements.
* @since 4.1
* @see org.springframework.data.domain.OffsetScrollPosition
* @see org.springframework.data.domain.KeysetScrollPosition
*/
Window<T> scroll(ScrollPosition scrollPosition);
/**
* Get the number of matching elements. <br />
* This method uses an
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
* guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications
* needs use {@link MongoOperations#estimatedCount(String)} for empty queries instead.
* Get the number of matching elements.
* <br />
* This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
* execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard,
* session and transaction compliance. In case an inaccurate count satisfies the applications needs use
* {@link MongoOperations#estimatedCount(String)} for empty queries instead.
*
* @return total number of matching elements.
*/

View File

@@ -20,9 +20,8 @@ import java.util.Optional;
import java.util.stream.Stream;
import org.bson.Document;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.domain.Window;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.SerializationUtils;
@@ -73,8 +72,8 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
private final @Nullable String collection;
private final Query query;
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType, @Nullable String collection,
Query query) {
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType,
@Nullable String collection, Query query) {
this.template = template;
this.domainType = domainType;
this.returnType = returnType;
@@ -140,11 +139,6 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
return doStream();
}
@Override
public Window<T> scroll(ScrollPosition scrollPosition) {
return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName());
}
@Override
public TerminatingFindNear<T> near(NearQuery nearQuery) {
return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType);
@@ -174,8 +168,8 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
Document queryObject = query.getQueryObject();
Document fieldsObject = query.getFieldsObject();
return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType,
returnType, getCursorPreparer(query, preparer));
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
getCursorPreparer(query, preparer));
}
private List<T> doFindDistinct(String field) {

View File

@@ -76,7 +76,7 @@ public interface ExecutableRemoveOperation {
/**
* Remove and return all matching documents. <br/>
* <strong>NOTE:</strong> The entire list of documents will be fetched before sending the actual delete commands.
* <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
* Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
* operation.
*

View File

@@ -87,23 +87,6 @@ public interface ExecutableUpdateOperation {
T findAndModifyValue();
}
/**
* Trigger <a href="https://docs.mongodb.com/manual/reference/method/db.collection.replaceOne/">replaceOne</a>
* execution by calling one of the terminating methods.
*
* @author Christoph Strobl
* @since 4.2
*/
interface TerminatingReplace {
/**
* Find first and replace/upsert.
*
* @return never {@literal null}.
*/
UpdateResult replaceFirst();
}
/**
* Trigger
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
@@ -112,7 +95,7 @@ public interface ExecutableUpdateOperation {
* @author Mark Paluch
* @since 2.1
*/
interface TerminatingFindAndReplace<T> extends TerminatingReplace {
interface TerminatingFindAndReplace<T> {
/**
* Find, replace and return the first matching document.
@@ -260,22 +243,6 @@ public interface ExecutableUpdateOperation {
TerminatingFindAndModify<T> withOptions(FindAndModifyOptions options);
}
/**
* @author Christoph Strobl
* @since 4.2
*/
interface ReplaceWithOptions extends TerminatingReplace {
/**
* Explicitly define {@link ReplaceOptions}.
*
* @param options must not be {@literal null}.
* @return new instance of {@link FindAndReplaceOptions}.
* @throws IllegalArgumentException if options is {@literal null}.
*/
TerminatingReplace withOptions(ReplaceOptions options);
}
/**
* Define {@link FindAndReplaceOptions}.
*
@@ -283,7 +250,7 @@ public interface ExecutableUpdateOperation {
* @author Christoph Strobl
* @since 2.1
*/
interface FindAndReplaceWithOptions<T> extends TerminatingFindAndReplace<T>, ReplaceWithOptions {
interface FindAndReplaceWithOptions<T> extends TerminatingFindAndReplace<T> {
/**
* Explicitly define {@link FindAndReplaceOptions} for the {@link Update}.

View File

@@ -126,17 +126,6 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
options, replacement, targetType);
}
@Override
public TerminatingReplace withOptions(ReplaceOptions options) {
FindAndReplaceOptions target = new FindAndReplaceOptions();
if (options.isUpsert()) {
target.upsert();
}
return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
target, replacement, targetType);
}
@Override
public UpdateWithUpdate<T> matching(Query query) {
@@ -186,18 +175,6 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
getCollectionName(), targetType);
}
@Override
public UpdateResult replaceFirst() {
if (replacement != null) {
return template.replace(query, domainType, replacement,
findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName());
}
return template.replace(query, domainType, update,
findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName());
}
private UpdateResult doUpdate(boolean multi, boolean upsert) {
return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi);
}

View File

@@ -31,9 +31,10 @@ package org.springframework.data.mongodb.core;
* @author Christoph Strobl
* @since 2.1
*/
public class FindAndReplaceOptions extends ReplaceOptions {
public class FindAndReplaceOptions {
private boolean returnNew;
private boolean upsert;
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
@@ -108,7 +109,7 @@ public class FindAndReplaceOptions extends ReplaceOptions {
*/
public FindAndReplaceOptions upsert() {
super.upsert();
this.upsert = true;
return this;
}
@@ -121,4 +122,13 @@ public class FindAndReplaceOptions extends ReplaceOptions {
return returnNew;
}
/**
* Get the flag indicating whether to create a new document if none exists.
*
* @return {@literal true} if set.
*/
public boolean isUpsert() {
return upsert;
}
}

View File

@@ -27,7 +27,6 @@ import org.springframework.util.StringUtils;
* Function object to apply a query hint. Can be an index name or a BSON document.
*
* @author Mark Paluch
* @author Christoph Strobl
* @since 4.1
*/
class HintFunction {
@@ -68,32 +67,6 @@ class HintFunction {
return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson;
}
/**
* If a hint is not present, returns {@code true}, otherwise {@code false}.
*
* @return {@code true} if a hint is not present, otherwise {@code false}.
*/
public boolean isEmpty() {
return !isPresent();
}
/**
* Apply the hint to consumers depending on the hint format if {@link #isPresent() present}.
*
* @param registryProvider
* @param stringConsumer
* @param bsonConsumer
* @param <R>
*/
public <R> void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function<String, R> stringConsumer,
Function<Bson, R> bsonConsumer) {
if (isEmpty()) {
return;
}
apply(registryProvider, stringConsumer, bsonConsumer);
}
/**
* Apply the hint to consumers depending on the hint format.
*
@@ -106,7 +79,7 @@ class HintFunction {
public <R> R apply(@Nullable CodecRegistryProvider registryProvider, Function<String, R> stringConsumer,
Function<Bson, R> bsonConsumer) {
if (isEmpty()) {
if (!isPresent()) {
throw new IllegalStateException("No hint present");
}

View File

@@ -119,10 +119,6 @@ abstract class IndexConverters {
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
}
if (indexOptions.containsKey("hidden")) {
ops = ops.hidden((Boolean) indexOptions.get("hidden"));
}
return ops;
};
}

View File

@@ -203,9 +203,8 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required));
}
}
JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next()
return targetProperties.size() == 1 ? targetProperties.iterator().next()
: JsonSchemaProperty.merged(targetProperties);
return applyEncryptionDataIfNecessary(property, schemaProperty);
}
}
@@ -323,7 +322,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {
TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? JsonSchemaObject.of(typeObject)
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
: JsonSchemaObject.of(Class.class.cast(type));
if (!CollectionUtils.isEmpty(possibleValues)) {
@@ -332,22 +331,23 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
return schemaObject;
}
private String computePropertyFieldName(PersistentProperty<?> property) {
private String computePropertyFieldName(PersistentProperty property) {
return property instanceof MongoPersistentProperty mongoPersistentProperty ?
mongoPersistentProperty.getFieldName() : property.getName();
return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
: property.getName();
}
private boolean isRequiredProperty(PersistentProperty<?> property) {
private boolean isRequiredProperty(PersistentProperty property) {
return property.getType().isPrimitive();
}
private Class<?> computeTargetType(PersistentProperty<?> property) {
if (!(property instanceof MongoPersistentProperty mongoProperty)) {
if (!(property instanceof MongoPersistentProperty)) {
return property.getType();
}
MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
if (!mongoProperty.isIdProperty()) {
return mongoProperty.getFieldType();
}

View File

@@ -21,10 +21,9 @@ package org.springframework.data.mongodb.core;
*
* @author Mark Pollack
* @author Oliver Gierke
* @author Christoph Strobl
* @see MongoAction
*/
public enum MongoActionOperation {
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK, REPLACE;
REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK;
}

View File

@@ -99,12 +99,12 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) {
if (ex instanceof MongoServerException mse) {
if (mse.getCode() == 11000) {
if (ex instanceof MongoServerException) {
if (((MongoServerException) ex).getCode() == 11000) {
return new DuplicateKeyException(ex.getMessage(), ex);
}
if (ex instanceof MongoBulkWriteException bulkException) {
for (BulkWriteError x : bulkException.getWriteErrors()) {
if (ex instanceof MongoBulkWriteException) {
for (BulkWriteError x : ((MongoBulkWriteException) ex).getWriteErrors()) {
if (x.getCode() == 11000) {
return new DuplicateKeyException(ex.getMessage(), ex);
}
@@ -116,9 +116,9 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
}
// All other MongoExceptions
if (ex instanceof MongoException mongoException) {
if (ex instanceof MongoException) {
int code = mongoException.getCode();
int code = ((MongoException) ex).getCode();
if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
return new DuplicateKeyException(ex.getMessage(), ex);

View File

@@ -23,8 +23,6 @@ import java.util.function.Supplier;
import java.util.stream.Stream;
import org.bson.Document;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
@@ -40,7 +38,6 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
@@ -119,7 +116,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Execute a MongoDB query and iterate over the query results on a per-document basis with a DocumentCallbackHandler.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param collectionName name of the collection to retrieve the objects from.
* @param dch the handler that will extract results, one document at a time.
@@ -225,7 +222,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <p>
* Returns a {@link String} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityType must not be {@literal null}.
* @param <T> element return type
@@ -241,7 +238,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <p>
* Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityType must not be {@literal null}.
* @param collectionName must not be {@literal null} or empty.
@@ -322,8 +319,7 @@ public interface MongoOperations extends FluentMongoOperations {
* @param options additional settings to apply when creating the view. Can be {@literal null}.
* @since 4.0
*/
MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline,
@Nullable ViewOptions options);
MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline, @Nullable ViewOptions options);
/**
* Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
@@ -335,8 +331,7 @@ public interface MongoOperations extends FluentMongoOperations {
* @param options additional settings to apply when creating the view. Can be {@literal null}.
* @since 4.0
*/
MongoCollection<Document> createView(String name, String source, AggregationPipeline pipeline,
@Nullable ViewOptions options);
MongoCollection<Document> createView(String name, String source, AggregationPipeline pipeline, @Nullable ViewOptions options);
/**
* A set of collection names.
@@ -723,7 +718,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned list.
* @return the converted object.
@@ -739,7 +734,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from.
@@ -753,7 +748,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <strong>NOTE:</strong> Any additional support for query/field mapping, etc. is not available due to the lack of
* domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param collectionName name of the collection to check for objects.
* @return {@literal true} if the query yields a result.
*/
@@ -762,7 +757,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type.
* @return {@literal true} if the query yields a result.
*/
@@ -771,7 +766,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type. Can be {@literal null}.
* @param collectionName name of the collection to check for objects.
* @return {@literal true} if the query yields a result.
@@ -785,7 +780,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityClass the parametrized type of the returned list. Must not be {@literal null}.
* @return the List of converted objects.
@@ -799,7 +794,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityClass the parametrized type of the returned list. Must not be {@literal null}.
* @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}.
@@ -807,57 +802,6 @@ public interface MongoOperations extends FluentMongoOperations {
*/
<T> List<T> find(Query query, Class<T> entityClass, String collectionName);
/**
* Query for a window of objects of type T from the specified collection. <br />
* Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
* {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
* Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}.
* Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
* <p>
* When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
* sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
* {@code null} values through {@code $gt/$lt} operators.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* specification. Must not be {@literal null}.
* @param entityType the parametrized type of the returned window.
* @return the converted window.
* @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
* position.
* @since 4.1
* @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
* @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
*/
<T> Window<T> scroll(Query query, Class<T> entityType);
/**
* Query for a window of objects of type T from the specified collection. <br />
* Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
* {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
* Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}.
* Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
* to map objects since the test for class type is done in the client and not on the server.
* <p>
* When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
* sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
* {@code null} values through {@code $gt/$lt} operators.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* specification. Must not be {@literal null}.
* @param entityType the parametrized type of the returned window.
* @param collectionName name of the collection to retrieve the objects from.
* @return the converted window.
* @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
* position.
* @since 4.1
* @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
* @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
*/
<T> Window<T> scroll(Query query, Class<T> entityType, String collectionName);
/**
* Returns a document with the given id mapped onto the given class. The collection the query is ran against will be
* derived from the given target class as well.
@@ -943,7 +887,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param entityClass the parametrized type. Must not be {@literal null}.
@@ -959,7 +903,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param entityClass the parametrized type. Must not be {@literal null}.
@@ -977,7 +921,7 @@ public interface MongoOperations extends FluentMongoOperations {
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link UpdateDefinition} to apply on matching documents.
* @param options the {@link FindAndModifyOptions} holding additional information.
@@ -997,7 +941,7 @@ public interface MongoOperations extends FluentMongoOperations {
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1023,7 +967,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the converted object that was updated or {@literal null}, if not found.
@@ -1044,7 +988,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param collectionName the collection to query. Must not be {@literal null}.
@@ -1063,7 +1007,7 @@ public interface MongoOperations extends FluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account.<br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1086,7 +1030,7 @@ public interface MongoOperations extends FluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account.<br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1109,7 +1053,7 @@ public interface MongoOperations extends FluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account.<br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1134,7 +1078,7 @@ public interface MongoOperations extends FluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account.<br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1164,7 +1108,7 @@ public interface MongoOperations extends FluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account.<br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -1189,7 +1133,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned list.
* @return the converted object
@@ -1206,7 +1150,7 @@ public interface MongoOperations extends FluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned list.
* @param collectionName name of the collection to retrieve the objects from.
@@ -1231,7 +1175,7 @@ public interface MongoOperations extends FluentMongoOperations {
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
* @return the count of matching documents.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given type.
* {@link #getCollectionName(Class) derived} from the given type.
* @see #exactCount(Query, Class)
* @see #estimatedCount(Class)
*/
@@ -1491,7 +1435,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1514,7 +1458,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1530,7 +1474,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
* combining the query document and the update document.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1547,7 +1491,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Updates the first object that is found in the collection of the entity class that matches the query document with
* the provided update document.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1570,7 +1514,7 @@ public interface MongoOperations extends FluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1586,7 +1530,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided updated document. <br />
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1603,7 +1547,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1624,7 +1568,7 @@ public interface MongoOperations extends FluentMongoOperations {
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
* support.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1640,7 +1584,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1673,7 +1617,7 @@ public interface MongoOperations extends FluentMongoOperations {
* acknowledged} remove operation was successful or not.
*
* @param object must not be {@literal null}.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
*/
DeleteResult remove(Object object, String collectionName);
@@ -1682,7 +1626,7 @@ public interface MongoOperations extends FluentMongoOperations {
* Remove all documents that match the provided query document criteria from the collection used to store the
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param query the query document that specifies the criteria used to remove a record.
* @param entityClass class that determines the collection to use.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}.
@@ -1695,9 +1639,9 @@ public interface MongoOperations extends FluentMongoOperations {
* Remove all documents that match the provided query document criteria from the collection used to store the
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param query the query document that specifies the criteria used to remove a record.
* @param entityClass class of the pojo to be operated on. Can be {@literal null}.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws IllegalArgumentException when {@literal query}, {@literal entityClass} or {@literal collectionName} is
* {@literal null}.
@@ -1710,8 +1654,8 @@ public interface MongoOperations extends FluentMongoOperations {
* <strong>NOTE:</strong> Any additional support for field mapping is not available due to the lack of domain type
* information. Use {@link #remove(Query, Class, String)} to get full type specific support.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param query the query document that specifies the criteria used to remove a record.
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws IllegalArgumentException when {@literal query} or {@literal collectionName} is {@literal null}.
*/
@@ -1723,7 +1667,7 @@ public interface MongoOperations extends FluentMongoOperations {
* information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support.
*
* @param query the query document that specifies the criteria used to find and remove documents.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
* @return the {@link List} converted objects deleted by this operation.
* @since 1.5
*/
@@ -1748,79 +1692,12 @@ public interface MongoOperations extends FluentMongoOperations {
*
* @param query the query document that specifies the criteria used to find and remove documents.
* @param entityClass class of the pojo to be operated on.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
* @return the {@link List} converted objects deleted by this operation.
* @since 1.5
*/
<T> List<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName);
/**
* Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
* document. <br />
* The collection name is derived from the {@literal replacement} type. <br />
* Options are defaulted to {@link ReplaceOptions#none()}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
* contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
* to use. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous replacement.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 4.2
*/
default <T> UpdateResult replace(Query query, T replacement) {
return replace(query, replacement, ReplaceOptions.none());
}
/**
* Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
* document. Options are defaulted to {@link ReplaceOptions#none()}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
* contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
* to use. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param collectionName the collection to query. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous replacement.
* @since 4.2
*/
default <T> UpdateResult replace(Query query, T replacement, String collectionName) {
return replace(query, replacement, ReplaceOptions.none(), collectionName);
}
/**
* Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
* document taking {@link ReplaceOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may
* contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
* to use. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous replacement.
* @throws org.springframework.data.mapping.MappingException if the collection name cannot be
* {@link #getCollectionName(Class) derived} from the given replacement value.
* @since 4.2
*/
default <T> UpdateResult replace(Query query, T replacement, ReplaceOptions options) {
return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement)));
}
/**
* Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
* document taking {@link ReplaceOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may *
* contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
* to use. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous replacement.
* @since 4.2
*/
<T> UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName);
/**
* Returns the underlying {@link MongoConverter}.
*

View File

@@ -21,7 +21,6 @@ import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -29,7 +28,6 @@ import java.util.stream.Collectors;
import org.bson.BsonValue;
import org.bson.Document;
import org.bson.codecs.Codec;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.springframework.data.mapping.PropertyPath;
import org.springframework.data.mapping.PropertyReferenceException;
@@ -54,7 +52,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.mapping.ShardKey;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
@@ -193,15 +190,6 @@ class QueryOperations {
return new UpdateContext(replacement, upsert);
}
/**
* @param replacement the {@link MappedDocument mapped replacement} document.
* @param upsert use {@literal true} to insert diff when no existing document found.
* @return new instance of {@link UpdateContext}.
*/
UpdateContext replaceSingleContext(Query query, MappedDocument replacement, boolean upsert) {
return new UpdateContext(query, replacement, upsert);
}
/**
* Create a new {@link DeleteContext} instance removing all matching documents.
*
@@ -399,12 +387,12 @@ class QueryOperations {
for (Entry<String, Object> entry : fields.entrySet()) {
if (entry.getValue()instanceof MongoExpression mongoExpression) {
if (entry.getValue() instanceof MongoExpression) {
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx));
evaluated.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
} else {
evaluated.put(entry.getKey(), entry.getValue());
}
@@ -448,25 +436,6 @@ class QueryOperations {
return entityOperations.forType(domainType).getCollation(query) //
.map(Collation::toMongoCollation);
}
/**
* Get the {@link HintFunction} reading the actual hint form the {@link Query}.
*
* @return new instance of {@link HintFunction}.
* @since 4.2
*/
HintFunction getHintFunction() {
return HintFunction.from(query.getHint());
}
/**
* Read and apply the hint from the {@link Query}.
*
* @since 4.2
*/
<R> void applyHint(Function<String, R> stringConsumer, Function<Bson, R> bsonConsumer) {
getHintFunction().ifPresent(codecRegistryProvider, stringConsumer, bsonConsumer);
}
}
/**
@@ -486,7 +455,7 @@ class QueryOperations {
*/
private DistinctQueryContext(@Nullable Object query, String fieldName) {
super(query instanceof Document document ? new BasicQuery(document) : (Query) query);
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
this.fieldName = fieldName;
}
@@ -594,23 +563,10 @@ class QueryOperations {
if (query.getLimit() > 0) {
options.limit(query.getLimit());
}
if (query.getSkip() > 0) {
options.skip((int) query.getSkip());
}
Meta meta = query.getMeta();
if (meta.hasValues()) {
if (meta.hasMaxTime()) {
options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS);
}
if (meta.hasComment()) {
options.comment(meta.getComment());
}
}
HintFunction hintFunction = HintFunction.from(query.getHint());
if (hintFunction.isPresent()) {
@@ -724,12 +680,8 @@ class QueryOperations {
}
UpdateContext(MappedDocument update, boolean upsert) {
this(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())), update, upsert);
}
UpdateContext(Query query, MappedDocument update, boolean upsert) {
super(query);
super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
this.multi = false;
this.upsert = upsert;
this.mappedDocument = update;
@@ -763,7 +715,6 @@ class QueryOperations {
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
}
HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint);
applyCollation(domainType, options::collation);
if (callback != null) {
@@ -797,7 +748,6 @@ class QueryOperations {
ReplaceOptions options = new ReplaceOptions();
options.collation(updateOptions.getCollation());
options.upsert(updateOptions.isUpsert());
applyHint(options::hintString, options::hint);
if (callback != null) {
callback.accept(options);
@@ -811,7 +761,7 @@ class QueryOperations {
Document mappedQuery = super.getMappedQuery(domainType);
if (multi && update != null && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
mappedQuery.put("$isolated", 1);
}
@@ -825,7 +775,7 @@ class QueryOperations {
Document filterWithShardKey = new Document(filter);
getMappedShardKeyFields(domainType)
.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key)));
.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));
return filterWithShardKey;
}
@@ -907,7 +857,7 @@ class QueryOperations {
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
if (update != null && !update.modifies(versionFieldName)) {
if (!update.modifies(versionFieldName)) {
update.inc(versionFieldName);
}
}
@@ -955,10 +905,10 @@ class QueryOperations {
this.aggregation = aggregation;
if (aggregation instanceof TypedAggregation typedAggregation) {
this.inputType = typedAggregation.getInputType();
} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) {
this.inputType = typeBasedAggregationOperationContext.getType();
if (aggregation instanceof TypedAggregation) {
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
} else {
this.inputType = null;
}
@@ -983,8 +933,8 @@ class QueryOperations {
this.aggregation = aggregation;
if (aggregation instanceof TypedAggregation typedAggregation) {
this.inputType = typedAggregation.getInputType();
if (aggregation instanceof TypedAggregation) {
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
} else {
this.inputType = inputType;
}

View File

@@ -98,7 +98,9 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
return collection;
}
if (aggregation instanceof TypedAggregation typedAggregation) {
if (aggregation instanceof TypedAggregation) {
TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
if (typedAggregation.getInputType() != null) {
return template.getCollectionName(typedAggregation.getInputType());

View File

@@ -1,138 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import reactor.core.publisher.Mono;
import java.util.List;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import com.mongodb.bulk.BulkWriteResult;
/**
* Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and
* make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
* operations or list of similar operations in sequence which can then eventually be executed by calling
* {@link #execute()}.
*
* <pre class="code">
* ReactiveMongoOperations ops = …;
*
* ops.bulkOps(BulkMode.UNORDERED, Person.class)
* .insert(newPerson)
* .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
* .execute();
* </pre>
* <p>
* Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
* that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
* the version field remains not populated.
*
* @author Christoph Strobl
* @since 4.1
*/
public interface ReactiveBulkOperations {
/**
* Add a single insert to the bulk operation.
*
* @param documents the document to insert, must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
*/
ReactiveBulkOperations insert(Object documents);
/**
* Add a list of inserts to the bulk operation.
*
* @param documents List of documents to insert, must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
*/
ReactiveBulkOperations insert(List<? extends Object> documents);
/**
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
*
* @param query update criteria, must not be {@literal null}.
* @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
*/
ReactiveBulkOperations updateOne(Query query, UpdateDefinition update);
/**
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
*/
ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update);
/**
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
* else an insert.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
*/
ReactiveBulkOperations upsert(Query query, UpdateDefinition update);
/**
* Add a single remove operation to the bulk operation.
*
* @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
*/
ReactiveBulkOperations remove(Query remove);
/**
* Add a list of remove operations to the bulk operation.
*
* @param removes the remove operations to perform, must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
*/
ReactiveBulkOperations remove(List<Query> removes);
/**
* Add a single replace operation to the bulk operation.
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
*/
default ReactiveBulkOperations replaceOne(Query query, Object replacement) {
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
}
/**
* Add a single replace operation to the bulk operation.
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
* @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
*/
ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
/**
* Execute all bulk operations using the default write concern.
*
* @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc.
*/
Mono<BulkWriteResult> execute();
}

View File

@@ -93,10 +93,10 @@ class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperat
return withOptions(builder -> {
if (token instanceof Instant instant) {
builder.resumeAt(instant);
} else if (token instanceof BsonTimestamp bsonTimestamp) {
builder.resumeAt(bsonTimestamp);
if (token instanceof Instant) {
builder.resumeAt((Instant) token);
} else if (token instanceof BsonTimestamp) {
builder.resumeAt((BsonTimestamp) token);
}
});
}
@@ -161,8 +161,8 @@ class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperat
}
options.getFilter().ifPresent(it -> {
if (it instanceof Aggregation aggregation) {
builder.filter(aggregation);
if (it instanceof Aggregation) {
builder.filter((Aggregation) it);
} else {
builder.filter(((List<Document>) it).toArray(new Document[0]));
}

View File

@@ -18,9 +18,6 @@ package org.springframework.data.mongodb.core;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
@@ -90,27 +87,14 @@ public interface ReactiveFindOperation {
*/
Flux<T> all();
/**
* Return a scroll of elements either starting or resuming at {@link ScrollPosition}.
* <p>
* When using {@link KeysetScrollPosition}, make sure to use non-nullable
* {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct
* a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators.
*
* @param scrollPosition the scroll position.
* @return a scroll of the resulting elements.
* @since 4.1
* @see org.springframework.data.domain.OffsetScrollPosition
* @see org.springframework.data.domain.KeysetScrollPosition
*/
Mono<Window<T>> scroll(ScrollPosition scrollPosition);
/**
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
* not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link org.reactivestreams.Subscription#cancel() canceled}. <br />
* {@link org.reactivestreams.Subscription#cancel() canceled}.
* <br />
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
* document at the "end" of the collection and then the application deletes that document. <br />
* document at the "end" of the collection and then the application deletes that document.
* <br />
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the
* streams will linger and exhaust resources. <br/>
* <strong>NOTE:</strong> Requires a capped collection.
@@ -121,7 +105,8 @@ public interface ReactiveFindOperation {
Flux<T> tail();
/**
* Get the number of matching elements. <br />
* Get the number of matching elements.
* <br />
* This method uses an
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but

View File

@@ -20,9 +20,6 @@ import reactor.core.publisher.Mono;
import org.bson.Document;
import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.domain.Window;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.SerializationUtils;
@@ -70,8 +67,8 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
private final String collection;
private final Query query;
ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType, String collection,
Query query) {
ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
String collection, Query query) {
this.template = template;
this.domainType = domainType;
@@ -139,11 +136,6 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
return doFind(null);
}
@Override
public Mono<Window<T>> scroll(ScrollPosition scrollPosition) {
return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName());
}
@Override
public Flux<T> tail() {
return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType));
@@ -177,8 +169,8 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
Document queryObject = query.getQueryObject();
Document fieldsObject = query.getFieldsObject();
return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject,
fieldsObject, domainType, returnType, preparer != null ? preparer : getCursorPreparer(query));
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
preparer != null ? preparer : getCursorPreparer(query));
}
@SuppressWarnings("unchecked")

View File

@@ -15,7 +15,6 @@
*/
package org.springframework.data.mongodb.core;
import org.springframework.data.mongodb.core.query.Collation;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -26,11 +25,9 @@ import java.util.function.Supplier;
import org.bson.Document;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
@@ -282,8 +279,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param options additional settings to apply when creating the view. Can be {@literal null}.
* @since 4.0
*/
Mono<MongoCollection<Document>> createView(String name, Class<?> source, AggregationPipeline pipeline,
@Nullable ViewOptions options);
Mono<MongoCollection<Document>> createView(String name, Class<?> source, AggregationPipeline pipeline, @Nullable ViewOptions options);
/**
* Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
@@ -295,8 +291,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* @param options additional settings to apply when creating the view. Can be {@literal null}.
* @since 4.0
*/
Mono<MongoCollection<Document>> createView(String name, String source, AggregationPipeline pipeline,
@Nullable ViewOptions options);
Mono<MongoCollection<Document>> createView(String name, String source, AggregationPipeline pipeline, @Nullable ViewOptions options);
/**
* A set of collection names.
@@ -351,40 +346,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*/
Mono<Void> dropCollection(String collectionName);
/**
* Returns a new {@link ReactiveBulkOperations} for the given collection. <br />
* <strong>NOTE:</strong> Any additional support for field mapping, etc. is not available for {@literal update} or
* {@literal remove} operations in bulk mode due to the lack of domain type information. Use
* {@link #bulkOps(BulkMode, Class, String)} to get full type specific support.
*
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
* @return {@link ReactiveBulkOperations} on the named collection
* @since 4.1
*/
ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName);
/**
* Returns a new {@link ReactiveBulkOperations} for the given entity type.
*
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param entityClass the name of the entity class, must not be {@literal null}.
* @return {@link ReactiveBulkOperations} on the named collection associated of the given entity class.
* @since 4.1
*/
ReactiveBulkOperations bulkOps(BulkMode mode, Class<?> entityClass);
/**
* Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name.
*
* @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
* @param entityType the name of the entity class. Can be {@literal null}.
* @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
* @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class.
* @since 4.1
*/
ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class<?> entityType, String collectionName);
/**
* Query for a {@link Flux} of objects of type T from the collection used by the entity class. <br />
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
@@ -418,7 +379,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Mono}.
* @return the converted object.
@@ -433,7 +394,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Mono}.
* @param collectionName name of the collection to retrieve the objects from.
@@ -446,7 +407,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <strong>NOTE:</strong> Any additional support for query/field mapping, etc. is not available due to the lack of
* domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param collectionName name of the collection to check for objects.
* @return {@literal true} if the query yields a result.
*/
@@ -455,7 +416,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type.
* @return {@literal true} if the query yields a result.
*/
@@ -464,7 +425,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Determine result of given {@link Query} contains at least one element.
*
* @param query the {@link Query} class that specifies the criteria used to find a document.
* @param query the {@link Query} class that specifies the criteria used to find a record.
* @param entityClass the parametrized type. Can be {@literal null}.
* @param collectionName name of the collection to check for objects.
* @return {@literal true} if the query yields a result.
@@ -479,7 +440,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityClass the parametrized type of the returned {@link Flux}. Must not be {@literal null}.
* @return the {@link Flux} of converted objects.
@@ -493,7 +454,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification. Must not be {@literal null}.
* @param entityClass the parametrized type of the returned {@link Flux}.
* @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}.
@@ -501,57 +462,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*/
<T> Flux<T> find(Query query, Class<T> entityClass, String collectionName);
/**
 * Query for a scroll of objects of type T from the collection used by the entity class. <br />
 * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
 * {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
 * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}.
 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
 * to map objects since the test for class type is done in the client and not on the server.
 * <p>
 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
 * {@code null} values through {@code $gt/$lt} operators.
 *
 * @param query the query class that specifies the criteria used to find a document and also an optional fields
 * specification. Must not be {@literal null}.
 * @param entityType the parametrized type of the returned window.
 * @return {@link Mono} emitting the converted window.
 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
 * position.
 * @since 4.1
 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
 */
<T> Mono<Window<T>> scroll(Query query, Class<T> entityType);
/**
 * Query for a window of objects of type T from the specified collection. <br />
 * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
 * {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
 * Result objects are converted from the MongoDB native representation using an instance of {@link MongoConverter}.
 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
 * to map objects since the test for class type is done in the client and not on the server.
 * <p>
 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
 * {@code null} values through {@code $gt/$lt} operators.
 *
 * @param query the query class that specifies the criteria used to find a document and also an optional fields
 * specification. Must not be {@literal null}.
 * @param entityType the parametrized type of the returned window.
 * @param collectionName name of the collection to retrieve the objects from.
 * @return {@link Mono} emitting the converted window.
 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
 * position.
 * @since 4.1
 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
 */
<T> Mono<Window<T>> scroll(Query query, Class<T> entityType, String collectionName);
/**
* Returns a document with the given id mapped onto the given class. The collection the query is ran against will be
* derived from the given target class as well.
@@ -759,7 +669,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param entityClass the parametrized type. Must not be {@literal null}.
@@ -774,7 +684,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param entityClass the parametrized type. Must not be {@literal null}.
@@ -791,7 +701,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification.
* @param update the {@link UpdateDefinition} to apply on matching documents.
* @param options the {@link FindAndModifyOptions} holding additional information.
@@ -809,7 +719,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -832,7 +742,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the converted object that was updated or {@link Mono#empty()}, if not found.
@@ -852,7 +762,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param collectionName the collection to query. Must not be {@literal null}.
@@ -870,7 +780,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -892,7 +802,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -914,7 +824,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -938,7 +848,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -967,7 +877,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an optional
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
* fields specification. Must not be {@literal null}.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
@@ -992,7 +902,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Mono}.
* @return the converted object
@@ -1008,7 +918,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Mono}.
* @param collectionName name of the collection to retrieve the objects from.
@@ -1370,7 +1280,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1391,7 +1301,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
* support.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1407,7 +1317,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
* combining the query document and the update document.
*
* @param query the query document that specifies the criteria used to select a document to be upserted. Must not be
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
@@ -1426,7 +1336,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1449,7 +1359,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1465,7 +1375,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided updated document. <br />
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1482,7 +1392,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1503,7 +1413,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
* support.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1519,7 +1429,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
* with the provided updated document.
*
* @param query the query document that specifies the criteria used to select a document to be updated. Must not be
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing. Must not be {@literal null}.
@@ -1546,7 +1456,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Removes the given object from the given collection.
*
* @param object must not be {@literal null}.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
*/
Mono<DeleteResult> remove(Object object, String collectionName);
@@ -1565,7 +1475,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Removes the given object from the given collection.
*
* @param objectToRemove must not be {@literal null}.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
*/
Mono<DeleteResult> remove(Mono<? extends Object> objectToRemove, String collectionName);
@@ -1574,7 +1484,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Remove all documents that match the provided query document criteria from the collection used to store the
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param query the query document that specifies the criteria used to remove a record.
* @param entityClass class that determines the collection to use.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
* @throws org.springframework.data.mapping.MappingException if the target collection name cannot be
@@ -1586,9 +1496,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Remove all documents that match the provided query document criteria from the collection used to store the
* entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param query the query document that specifies the criteria used to remove a record.
* @param entityClass class of the pojo to be operated on. Can be {@literal null}.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
*/
Mono<DeleteResult> remove(Query query, @Nullable Class<?> entityClass, String collectionName);
@@ -1599,8 +1509,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* <strong>NOTE:</strong> Any additional support for field mapping is not available due to the lack of domain type
* information. Use {@link #remove(Query, Class, String)} to get full type specific support.
*
* @param query the query document that specifies the criteria used to remove a document.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param query the query document that specifies the criteria used to remove a record.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
*/
Mono<DeleteResult> remove(Query query, String collectionName);
@@ -1611,7 +1521,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support.
*
* @param query the query document that specifies the criteria used to find and remove documents.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link Flux} converted objects deleted by this operation.
*/
<T> Flux<T> findAllAndRemove(Query query, String collectionName);
@@ -1634,80 +1544,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param query the query document that specifies the criteria used to find and remove documents.
* @param entityClass class of the pojo to be operated on.
* @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} or empty.
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
* @return the {@link Flux} converted objects deleted by this operation.
*/
<T> Flux<T> findAllAndRemove(Query query, Class<T> entityClass, String collectionName);
/**
 * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
 * document. <br />
 * The collection name is derived from the {@literal replacement} type. <br />
 * Options are defaulted to {@link ReplaceOptions#none()}.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
 * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
 * to use. Must not be {@literal null}.
 * @param replacement the replacement document. Must not be {@literal null}.
 * @return the {@link UpdateResult} which lets you access the results of the previous replacement.
 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
 * {@link #getCollectionName(Class) derived} from the given replacement value.
 * @since 4.2
 */
default <T> Mono<UpdateResult> replace(Query query, T replacement) {
	// Convenience overload delegating with default (empty) replace options.
	return replace(query, replacement, ReplaceOptions.none());
}
/**
 * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
 * document. Options are defaulted to {@link ReplaceOptions#none()}.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
 * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
 * to use. Must not be {@literal null}.
 * @param replacement the replacement document. Must not be {@literal null}.
 * @param collectionName the collection to query. Must not be {@literal null}.
 * @return the {@link UpdateResult} which lets you access the results of the previous replacement.
 * @since 4.2
 */
default <T> Mono<UpdateResult> replace(Query query, T replacement, String collectionName) {
	// Convenience overload delegating with default (empty) replace options.
	return replace(query, replacement, ReplaceOptions.none(), collectionName);
}
/**
 * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
 * document taking {@link ReplaceOptions} into account.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
 * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
 * to use. Must not be {@literal null}.
 * @param replacement the replacement document. Must not be {@literal null}.
 * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}.
 * @return the {@link UpdateResult} which lets you access the results of the previous replacement.
 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
 * {@link #getCollectionName(Class) derived} from the given replacement value.
 * @since 4.2
 */
default <T> Mono<UpdateResult> replace(Query query, T replacement, ReplaceOptions options) {
	// Derive the target collection from the (potentially proxied) replacement type.
	return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement)));
}
/**
 * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement}
 * document taking {@link ReplaceOptions} into account.
 *
 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may
 * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation}
 * to use. Must not be {@literal null}.
 * @param replacement the replacement document. Must not be {@literal null}.
 * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}.
 * @param collectionName the collection to query. Must not be {@literal null}.
 * @return the {@link UpdateResult} which lets you access the results of the previous replacement.
 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
 * {@link #getCollectionName(Class) derived} from the given replacement value.
 * @since 4.2
 */
<T> Mono<UpdateResult> replace(Query query, T replacement, ReplaceOptions options, String collectionName);
/**
* Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified
* type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
@@ -1718,7 +1559,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Flux}.
* @return the {@link Flux} of converted objects.
@@ -1737,7 +1578,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
* @param query the query class that specifies the criteria used to find a document and also an optional fields
* @param query the query class that specifies the criteria used to find a record and also an optional fields
* specification.
* @param entityClass the parametrized type of the returned {@link Flux}.
* @param collectionName name of the collection to retrieve the objects from.
@@ -1852,7 +1693,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
<T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, String inputCollectionName, Class<T> resultType,
String mapFunction, String reduceFunction, MapReduceOptions options);
/**
* Returns the underlying {@link MongoConverter}.
*

View File

@@ -69,7 +69,7 @@ public interface ReactiveRemoveOperation {
/**
* Remove and return all matching documents. <br/>
* <strong>NOTE:</strong> The entire list of documents will be fetched before sending the actual delete commands.
* <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
* Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
* operation.
*

View File

@@ -72,30 +72,13 @@ public interface ReactiveUpdateOperation {
Mono<T> findAndModify();
}
/**
* Trigger <a href="https://docs.mongodb.com/manual/reference/method/db.collection.replaceOne/">replaceOne</a>
* execution by calling one of the terminating methods.
*
* @author Christoph Strobl
* @since 4.2
*/
interface TerminatingReplace {
/**
* Find first and replace/upsert.
*
* @return never {@literal null}.
*/
Mono<UpdateResult> replaceFirst();
}
/**
* Compose findAndReplace execution by calling one of the terminating methods.
*
* @author Mark Paluch
* @since 2.1
*/
interface TerminatingFindAndReplace<T> extends TerminatingReplace {
interface TerminatingFindAndReplace<T> {
/**
* Find, replace and return the first matching document.
@@ -219,22 +202,6 @@ public interface ReactiveUpdateOperation {
TerminatingFindAndModify<T> withOptions(FindAndModifyOptions options);
}
/**
* @author Christoph Strobl
* @since 4.2
*/
interface ReplaceWithOptions extends TerminatingReplace {
/**
* Explicitly define {@link ReplaceOptions}.
*
* @param options must not be {@literal null}.
* @return new instance of {@link FindAndReplaceOptions}.
* @throws IllegalArgumentException if options is {@literal null}.
*/
TerminatingReplace withOptions(ReplaceOptions options);
}
/**
* Define {@link FindAndReplaceOptions}.
*
@@ -242,7 +209,7 @@ public interface ReactiveUpdateOperation {
* @author Christoph Strobl
* @since 2.1
*/
interface FindAndReplaceWithOptions<T> extends TerminatingFindAndReplace<T>, ReplaceWithOptions {
interface FindAndReplaceWithOptions<T> extends TerminatingFindAndReplace<T> {
/**
* Explicitly define {@link FindAndReplaceOptions} for the {@link Update}.

View File

@@ -165,17 +165,6 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
replacement, targetType);
}
@Override
public TerminatingReplace withOptions(ReplaceOptions options) {
FindAndReplaceOptions target = new FindAndReplaceOptions();
if (options.isUpsert()) {
target.upsert();
}
return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
target, replacement, targetType);
}
@Override
public <R> FindAndReplaceWithOptions<R> as(Class<R> resultType) {
@@ -185,18 +174,6 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
findAndReplaceOptions, replacement, resultType);
}
@Override
public Mono <UpdateResult> replaceFirst() {
if (replacement != null) {
return template.replace(query, domainType, replacement,
findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName());
}
return template.replace(query, domainType, update,
findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName());
}
private Mono<UpdateResult> doUpdate(boolean multi, boolean upsert) {
return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi);
}

View File

@@ -1,46 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.lang.Nullable;
import com.mongodb.ReadConcern;
/**
* Interface to be implemented by any object that wishes to expose the {@link ReadConcern}.
* <p>
* Typically implemented by cursor or query preparer objects.
*
* @author Mark Paluch
* @since 4.1
* @see org.springframework.data.mongodb.core.query.Query
* @see org.springframework.data.mongodb.core.aggregation.AggregationOptions
*/
public interface ReadConcernAware {
/**
* @return {@literal true} if a {@link ReadConcern} is set.
*/
default boolean hasReadConcern() {
return getReadConcern() != null;
}
/**
* @return the {@link ReadConcern} to apply or {@literal null} if none set.
*/
@Nullable
ReadConcern getReadConcern();
}

View File

@@ -27,8 +27,6 @@ import com.mongodb.ReadPreference;
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.2
* @see org.springframework.data.mongodb.core.query.Query
* @see org.springframework.data.mongodb.core.aggregation.AggregationOptions
*/
public interface ReadPreferenceAware {

View File

@@ -1,87 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import org.springframework.data.mongodb.core.query.Query;
/**
* Options for {@link org.springframework.data.mongodb.core.MongoOperations#replace(Query, Object) replace operations}. Defaults to
* <dl>
* <dt>upsert</dt>
* <dd>false</dd>
* </dl>
*
* @author Jakub Zurawa
 * @author Christoph Strobl
* @since 4.2
*/
public class ReplaceOptions {
private boolean upsert;
private static final ReplaceOptions NONE = new ReplaceOptions() {
private static final String ERROR_MSG = "ReplaceOptions.none() cannot be changed; Please use ReplaceOptions.options() instead";
@Override
public ReplaceOptions upsert() {
throw new UnsupportedOperationException(ERROR_MSG);
}
};
/**
* Static factory method to create a {@link ReplaceOptions} instance.
* <dl>
* <dt>upsert</dt>
* <dd>false</dd>
* </dl>
*
* @return new instance of {@link ReplaceOptions}.
*/
public static ReplaceOptions replaceOptions() {
return new ReplaceOptions();
}
/**
* Static factory method returning an unmodifiable {@link ReplaceOptions} instance.
*
* @return unmodifiable {@link ReplaceOptions} instance.
*/
public static ReplaceOptions none() {
return NONE;
}
/**
* Insert a new document if not exists.
*
* @return this.
*/
public ReplaceOptions upsert() {
this.upsert = true;
return this;
}
/**
* Get the bit indicating if to create a new document if not exists.
*
* @return {@literal true} if set.
*/
public boolean isUpsert() {
return upsert;
}
}

View File

@@ -1,268 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.IntFunction;
import org.bson.BsonNull;
import org.bson.Document;
import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.ScrollPosition;
import org.springframework.data.domain.ScrollPosition.Direction;
import org.springframework.data.domain.Window;
import org.springframework.data.mongodb.core.EntityOperations.Entity;
import org.springframework.data.mongodb.core.query.Query;
/**
* Utilities to run scroll queries and create {@link Window} results.
*
* @author Mark Paluch
* @author Christoph Strobl
* @since 4.1
*/
class ScrollUtils {
/**
* Create the actual query to run keyset-based pagination. Affects projection, sorting, and the criteria.
*
* @param query
* @param idPropertyName
* @return
*/
static KeysetScrollQuery createKeysetPaginationQuery(Query query, String idPropertyName) {
KeysetScrollPosition keyset = query.getKeyset();
KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection());
Document sortObject = director.getSortObject(idPropertyName, query);
Document fieldsObject = director.getFieldsObject(query.getFieldsObject(), sortObject);
Document queryObject = director.createQuery(keyset, query.getQueryObject(), sortObject);
return new KeysetScrollQuery(queryObject, fieldsObject, sortObject);
}
static <T> Window<T> createWindow(Query query, List<T> result, Class<?> sourceType, EntityOperations operations) {
Document sortObject = query.getSortObject();
KeysetScrollPosition keyset = query.getKeyset();
Direction direction = keyset.getDirection();
KeysetScrollDirector director = KeysetScrollDirector.of(direction);
List<T> resultsToUse = director.postPostProcessResults(result, query.getLimit());
IntFunction<ScrollPosition> positionFunction = value -> {
T last = resultsToUse.get(value);
Entity<T> entity = operations.forEntity(last);
Map<String, Object> keys = entity.extractKeys(sortObject, sourceType);
return ScrollPosition.of(keys, direction);
};
return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit()));
}
static <T> Window<T> createWindow(List<T> result, int limit, IntFunction<? extends ScrollPosition> positionFunction) {
return Window.from(getSubList(result, limit), positionFunction, hasMoreElements(result, limit));
}
static boolean hasMoreElements(List<?> result, int limit) {
return !result.isEmpty() && result.size() > limit;
}
static <T> List<T> getSubList(List<T> result, int limit) {
if (limit > 0 && result.size() > limit) {
return result.subList(0, limit);
}
return result;
}
record KeysetScrollQuery(Document query, Document fields, Document sort) {
}
/**
* Director for keyset scrolling.
*/
static class KeysetScrollDirector {
private static final KeysetScrollDirector FORWARD = new KeysetScrollDirector();
private static final KeysetScrollDirector REVERSE = new ReverseKeysetScrollDirector();
/**
* Factory method to obtain the right {@link KeysetScrollDirector}.
*
* @param direction
* @return
*/
public static KeysetScrollDirector of(ScrollPosition.Direction direction) {
return direction == Direction.FORWARD ? FORWARD : REVERSE;
}
public Document getSortObject(String idPropertyName, Query query) {
Document sortObject = query.isSorted() ? query.getSortObject() : new Document();
sortObject.put(idPropertyName, 1);
return sortObject;
}
public Document getFieldsObject(Document fieldsObject, Document sortObject) {
// make sure we can extract the keyset
if (!fieldsObject.isEmpty()) {
for (String field : sortObject.keySet()) {
fieldsObject.put(field, 1);
}
}
return fieldsObject;
}
public Document createQuery(KeysetScrollPosition keyset, Document queryObject, Document sortObject) {
Map<String, Object> keysetValues = keyset.getKeys();
List<Document> or = (List<Document>) queryObject.getOrDefault("$or", new ArrayList<>());
List<String> sortKeys = new ArrayList<>(sortObject.keySet());
// first query doesn't come with a keyset
if (keysetValues.isEmpty()) {
return queryObject;
}
if (!keysetValues.keySet().containsAll(sortKeys)) {
throw new IllegalStateException("KeysetScrollPosition does not contain all keyset values");
}
// build matrix query for keyset paging that contains sort^2 queries
// reflecting a query that follows sort order semantics starting from the last returned keyset
for (int i = 0; i < sortKeys.size(); i++) {
Document sortConstraint = new Document();
for (int j = 0; j < sortKeys.size(); j++) {
String sortSegment = sortKeys.get(j);
int sortOrder = sortObject.getInteger(sortSegment);
Object o = keysetValues.get(sortSegment);
if (j >= i) { // tail segment
if (o instanceof BsonNull) {
throw new IllegalStateException(
"Cannot resume from KeysetScrollPosition. Offending key: '%s' is 'null'".formatted(sortSegment));
}
sortConstraint.put(sortSegment, new Document(getComparator(sortOrder), o));
break;
}
sortConstraint.put(sortSegment, o);
}
if (!sortConstraint.isEmpty()) {
or.add(sortConstraint);
}
}
if (!or.isEmpty()) {
queryObject.put("$or", or);
}
return queryObject;
}
protected String getComparator(int sortOrder) {
return sortOrder == 1 ? "$gt" : "$lt";
}
protected <T> List<T> postPostProcessResults(List<T> list, int limit) {
return getFirst(limit, list);
}
}
/**
 * Reverse scrolling director variant applying {@link ScrollPosition.Direction#BACKWARD}. In reverse scrolling,
* we need to flip directions for the actual query so that we do not get everything from the top position and apply
* the limit but rather flip the sort direction, apply the limit and then reverse the result to restore the actual
* sort order.
*/
private static class ReverseKeysetScrollDirector extends KeysetScrollDirector {
@Override
public Document getSortObject(String idPropertyName, Query query) {
Document sortObject = super.getSortObject(idPropertyName, query);
// flip sort direction for backward scrolling
for (String field : sortObject.keySet()) {
sortObject.put(field, sortObject.getInteger(field) == 1 ? -1 : 1);
}
return sortObject;
}
@Override
public <T> List<T> postPostProcessResults(List<T> list, int limit) {
// flip direction of the result list as we need to accommodate for the flipped sort order for proper offset
// querying.
Collections.reverse(list);
return getLast(limit, list);
}
}
/**
* Return the first {@code count} items from the list.
*
* @param count
* @param list
* @return
* @param <T>
*/
static <T> List<T> getFirst(int count, List<T> list) {
if (count > 0 && list.size() > count) {
return list.subList(0, count);
}
return list;
}
/**
* Return the last {@code count} items from the list.
*
* @param count
* @param list
* @return
* @param <T>
*/
static <T> List<T> getLast(int count, List<T> list) {
if (count > 0 && list.size() > count) {
return list.subList(list.size() - count, list.size());
}
return list;
}
}

View File

@@ -69,8 +69,8 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
@SuppressWarnings("unchecked")
private Object unpack(Object value, AggregationOperationContext context) {
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
if (value instanceof Field field) {
@@ -136,8 +136,8 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
List<Object> clone = new ArrayList<>((List<Object>) this.value);
if (value instanceof Collection<?> collection && Expand.EXPAND_VALUES.equals(expandList)) {
clone.addAll(collection);
if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) {
clone.addAll((Collection<?>) value);
} else {
clone.add(value);
}

View File

@@ -360,8 +360,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);
@@ -438,8 +440,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);
@@ -535,8 +539,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);
@@ -633,8 +639,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);
@@ -711,8 +719,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);
@@ -789,8 +799,10 @@ public class AccumulatorOperators {
@SuppressWarnings("unchecked")
public Document toDocument(Object value, AggregationOperationContext context) {
if (value instanceof List<?> list && list.size() == 1) {
return super.toDocument(list.iterator().next(), context);
if (value instanceof List) {
if (((List) value).size() == 1) {
return super.toDocument(((List<Object>) value).iterator().next(), context);
}
}
return super.toDocument(value, context);

View File

@@ -148,7 +148,7 @@ public class AddFieldsOperation extends DocumentEnhancingOperation {
@Override
public AddFieldsOperationBuilder withValueOf(Object value) {
valueMap.put(field, value instanceof String stringValue ? Fields.fields(stringValue) : value);
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
return AddFieldsOperationBuilder.this;
}

View File

@@ -28,7 +28,6 @@ import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddF
import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder;
import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder;
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder;
@@ -51,7 +50,6 @@ import org.springframework.util.Assert;
* @author Nikolay Bogdanov
* @author Gustavo de Geus
* @author Jérôme Guyon
* @author Sangyong Choi
* @since 1.3
*/
public class Aggregation {
@@ -666,23 +664,6 @@ public class Aggregation {
return new LookupOperation(from, localField, foreignField, as);
}
/**
* Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API.
* <pre class="code">
* Aggregation.lookup().from("restaurants")
* .localField("restaurant_name")
* .foreignField("name")
* .let(newVariable("orders_drink").forField("drink"))
* .pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
* .as("matches")
* </pre>
* @return new instance of {@link LookupOperationBuilder}.
* @since 4.1
*/
public static LookupOperationBuilder lookup() {
return new LookupOperationBuilder();
}
/**
* Creates a new {@link CountOperationBuilder}.
*

View File

@@ -21,10 +21,6 @@ import org.springframework.data.mongodb.MongoExpression;
/**
* An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like
* {@code project} and {@code group}.
* <p>
* The {@link AggregationExpression expressions} {@link #toDocument(AggregationOperationContext)} method is called during
* the mapping process to obtain the mapped, ready to use representation that can be handed over to the driver as part
* of an {@link AggregationOperation pipleine stage}.
*
* @author Thomas Darimont
* @author Oliver Gierke
@@ -43,11 +39,11 @@ public interface AggregationExpression extends MongoExpression {
*/
static AggregationExpression from(MongoExpression expression) {
if (expression instanceof AggregationExpression aggregationExpression) {
return aggregationExpression;
if (expression instanceof AggregationExpression) {
return AggregationExpression.class.cast(expression);
}
return context -> context.getMappedObject(expression.toDocument());
return (context) -> context.getMappedObject(expression.toDocument());
}
/**

View File

@@ -1,58 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.aggregation;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
/**
* A {@link CriteriaDefinition criteria} to use {@code $expr} within a
* {@link org.springframework.data.mongodb.core.query.Query}.
*
* @author Christoph Strobl
* @since 4.1
*/
public class AggregationExpressionCriteria implements CriteriaDefinition {
private final AggregationExpression expression;
AggregationExpressionCriteria(AggregationExpression expression) {
this.expression = expression;
}
/**
* @param expression must not be {@literal null}.
* @return new instance of {@link AggregationExpressionCriteria}.
*/
public static AggregationExpressionCriteria whereExpr(AggregationExpression expression) {
return new AggregationExpressionCriteria(expression);
}
@Override
public Document getCriteriaObject() {
if (expression instanceof Expr expr) {
return new Document(getKey(), expr.get(0));
}
return new Document(getKey(), expression);
}
@Override
public String getKey() {
return "$expr";
}
}

View File

@@ -55,15 +55,16 @@ class AggregationOperationRenderer {
operationDocuments.addAll(operation.toPipelineStages(contextToUse));
if (operation instanceof FieldsExposingAggregationOperation exposedFieldsOperation) {
if (operation instanceof FieldsExposingAggregationOperation) {
FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation;
ExposedFields fields = exposedFieldsOperation.getFields();
if (operation instanceof InheritsFieldsAggregationOperation || exposedFieldsOperation.inheritsFields()) {
contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse);
} else {
contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT
: new ExposedFieldsAggregationOperationContext(fields, contextToUse);
: new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse);
}
}
}

View File

@@ -19,16 +19,11 @@ import java.time.Duration;
import java.util.Optional;
import org.bson.Document;
import org.springframework.data.mongodb.core.ReadConcernAware;
import org.springframework.data.mongodb.core.ReadPreferenceAware;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
/**
* Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support
* aggregation options can be found in the MongoDB reference documentation
@@ -44,7 +39,7 @@ import com.mongodb.ReadPreference;
* @see TypedAggregation#withOptions(AggregationOptions)
* @since 1.6
*/
public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware {
public class AggregationOptions {
private static final String BATCH_SIZE = "batchSize";
private static final String CURSOR = "cursor";
@@ -61,10 +56,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
private final Optional<Collation> collation;
private final Optional<String> comment;
private final Optional<Object> hint;
private Optional<ReadConcern> readConcern;
private Optional<ReadPreference> readPreference;
private Duration maxTime = Duration.ZERO;
private ResultOptions resultOptions = ResultOptions.READ;
private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED;
@@ -132,8 +123,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
this.collation = Optional.ofNullable(collation);
this.comment = Optional.ofNullable(comment);
this.hint = Optional.ofNullable(hint);
this.readConcern = Optional.empty();
this.readPreference = Optional.empty();
}
/**
@@ -279,26 +268,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
return hint;
}
@Override
public boolean hasReadConcern() {
return readConcern.isPresent();
}
@Override
public ReadConcern getReadConcern() {
return readConcern.orElse(null);
}
@Override
public boolean hasReadPreference() {
return readPreference.isPresent();
}
@Override
public ReadPreference getReadPreference() {
return readPreference.orElse(null);
}
/**
* @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior.
* @since 3.0
@@ -416,8 +385,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
private @Nullable Collation collation;
private @Nullable String comment;
private @Nullable Object hint;
private @Nullable ReadConcern readConcern;
private @Nullable ReadPreference readPreference;
private @Nullable Duration maxTime;
private @Nullable ResultOptions resultOptions;
private @Nullable DomainTypeMapping domainTypeMapping;
@@ -523,32 +490,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
return this;
}
/**
* Define a {@link ReadConcern} to apply to the aggregation.
*
* @param readConcern can be {@literal null}.
* @return this.
* @since 4.1
*/
public Builder readConcern(@Nullable ReadConcern readConcern) {
this.readConcern = readConcern;
return this;
}
/**
* Define a {@link ReadPreference} to apply to the aggregation.
*
* @param readPreference can be {@literal null}.
* @return this.
* @since 4.1
*/
public Builder readPreference(@Nullable ReadPreference readPreference) {
this.readPreference = readPreference;
return this;
}
/**
* Set the time limit for processing.
*
@@ -632,12 +573,6 @@ public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware
if (domainTypeMapping != null) {
options.domainTypeMapping = domainTypeMapping;
}
if (readConcern != null) {
options.readConcern = Optional.of(readConcern);
}
if (readPreference != null) {
options.readPreference = Optional.of(readPreference);
}
return options;
}

View File

@@ -105,6 +105,6 @@ public class AggregationResults<T> implements Iterable<T> {
private String parseServerUsed() {
Object object = rawResults.get("serverUsed");
return object instanceof String stringValue ? stringValue : null;
return object instanceof String ? (String) object : null;
}
}

View File

@@ -97,8 +97,10 @@ public class AggregationUpdate extends Aggregation implements UpdateDefinition {
super(pipeline);
for (AggregationOperation operation : pipeline) {
if (operation instanceof FieldsExposingAggregationOperation exposingAggregationOperation) {
exposingAggregationOperation.getFields().forEach(it -> keysTouched.add(it.getName()));
if (operation instanceof FieldsExposingAggregationOperation) {
((FieldsExposingAggregationOperation) operation).getFields().forEach(it -> {
keysTouched.add(it.getName());
});
}
}
}

View File

@@ -1,133 +0,0 @@
/*
* Copyright 2022-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core.aggregation;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* A special field that points to a variable {@code $$} expression.
*
* @author Christoph Strobl
* @since 4.1.3
*/
/**
 * A special {@link Field} flavor representing a {@code $$}-prefixed variable expression.
 *
 * @author Christoph Strobl
 * @since 4.1.3
 */
public interface AggregationVariable extends Field {

	String PREFIX = "$$";

	/**
	 * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget()
	 *         target}.
	 */
	@Override
	default boolean isAliased() {
		return !ObjectUtils.nullSafeEquals(getName(), getTarget());
	}

	@Override
	default String getName() {
		return getTarget();
	}

	@Override
	default boolean isInternal() {
		return false;
	}

	/**
	 * Create a new {@link AggregationVariable} for the given name.
	 * <p>
	 * Variables start with {@code $$}. Values missing the prefix are prefixed with {@code $$}.
	 *
	 * @param value must not be {@literal null}.
	 * @return new instance of {@link AggregationVariable}.
	 * @throws IllegalArgumentException if given value is {@literal null}.
	 */
	static AggregationVariable variable(String value) {

		Assert.notNull(value, "Value must not be null");

		String target = prefixVariable(value);
		return new AggregationVariable() {

			@Override
			public String getTarget() {
				return target;
			}
		};
	}

	/**
	 * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name.
	 * <p>
	 * Variables start with {@code $$}. Values missing the prefix are prefixed with {@code $$}.
	 *
	 * @param value must not be {@literal null}.
	 * @return new instance of {@link AggregationVariable}.
	 * @throws IllegalArgumentException if given value is {@literal null}.
	 */
	static AggregationVariable localVariable(String value) {

		Assert.notNull(value, "Value must not be null");

		String target = prefixVariable(value);
		return new AggregationVariable() {

			@Override
			public String getTarget() {
				return target;
			}

			@Override
			public boolean isInternal() {
				return true;
			}
		};
	}

	/**
	 * Check if the given field name reference may be a variable.
	 *
	 * @param fieldRef can be {@literal null}.
	 * @return true if given value matches the variable identification pattern.
	 */
	static boolean isVariable(@Nullable String fieldRef) {

		if (fieldRef == null) {
			return false;
		}
		return fieldRef.stripLeading().matches("^\\$\\$\\w.*");
	}

	/**
	 * Check if the given field may be a variable.
	 *
	 * @param field can be {@literal null}.
	 * @return true if given {@link Field field} is an {@link AggregationVariable} or if its target is a
	 *         {@link #isVariable(String) variable}.
	 */
	static boolean isVariable(Field field) {
		return field instanceof AggregationVariable || isVariable(field.getTarget());
	}

	private static String prefixVariable(String variable) {

		String candidate = variable.stripLeading();
		if (candidate.startsWith(PREFIX)) {
			return candidate;
		}
		return PREFIX + candidate;
	}
}

View File

@@ -79,7 +79,7 @@ public class ArrayOperators {
private final @Nullable String fieldReference;
private final @Nullable AggregationExpression expression;
private final @Nullable Collection<?> values;
private final @Nullable Collection values;
/**
* Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
@@ -214,10 +214,6 @@ public class ArrayOperators {
return Filter.filter(fieldReference);
}
if (usesExpression()) {
return Filter.filter(expression);
}
Assert.state(values != null, "Values must not be null");
return Filter.filter(new ArrayList<>(values));
}
@@ -321,8 +317,7 @@ public class ArrayOperators {
}
/**
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort
* order}.
* Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort order}.
*
* @return new instance of {@link SortArray}.
* @since 4.0
@@ -402,8 +397,8 @@ public class ArrayOperators {
}
/**
* Creates new {@link AggregationExpression} that return the last element in the given array. <strong>NOTE:</strong>
* Requires MongoDB 4.4 or later.
* Creates new {@link AggregationExpression} that return the last element in the given array.
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
*
* @return new instance of {@link Last}.
* @since 3.4
@@ -654,19 +649,6 @@ public class ArrayOperators {
return new FilterExpressionBuilder().filter(field);
}
/**
* Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return never {@literal null}.
* @since 4.2
*/
public static AsBuilder filter(AggregationExpression expression) {
Assert.notNull(expression, "Field must not be null");
return new FilterExpressionBuilder().filter(expression);
}
/**
* Set the {@literal values} to apply the {@code $filter} to.
*
@@ -699,27 +681,18 @@ public class ArrayOperators {
}
private Object getMappedInput(AggregationOperationContext context) {
if (input instanceof Field field) {
return context.getReference(field).toString();
}
if (input instanceof AggregationExpression expression) {
return expression.toDocument(context);
}
return input;
return input instanceof Field ? context.getReference((Field) input).toString() : input;
}
private Object getMappedCondition(AggregationOperationContext context) {
if (!(condition instanceof AggregationExpression aggregationExpression)) {
if (!(condition instanceof AggregationExpression)) {
return condition;
}
NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext(
context, Collections.singleton(as));
return aggregationExpression.toDocument(nea);
return ((AggregationExpression) condition).toDocument(nea);
}
/**
@@ -742,15 +715,6 @@ public class ArrayOperators {
* @return
*/
AsBuilder filter(Field field);
/**
* Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to.
*
* @param expression must not be {@literal null}.
* @return
* @since 4.1.1
*/
AsBuilder filter(AggregationExpression expression);
}
/**
@@ -821,7 +785,7 @@ public class ArrayOperators {
public AsBuilder filter(List<?> array) {
Assert.notNull(array, "Array must not be null");
filter.input = new ArrayList<>(array);
filter.input = new ArrayList<Object>(array);
return this;
}
@@ -833,14 +797,6 @@ public class ArrayOperators {
return this;
}
@Override
public AsBuilder filter(AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
filter.input = expression;
return this;
}
@Override
public ConditionBuilder as(String variableName) {
@@ -1336,10 +1292,10 @@ public class ArrayOperators {
if (value instanceof Document) {
return value;
}
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
} else if (value instanceof Field field) {
return context.getReference(field).toString();
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
} else if (value instanceof Field) {
return context.getReference(((Field) value)).toString();
} else {
return context.getMappedObject(new Document("###val###", value)).get("###val###");
}
@@ -1377,7 +1333,7 @@ public class ArrayOperators {
Assert.notNull(expressions, "PropertyExpressions must not be null");
return new Reduce(Fields.field(fieldReference), initialValue,
Arrays.<AggregationExpression> asList(expressions));
Arrays.<AggregationExpression>asList(expressions));
}
};
}
@@ -1515,15 +1471,24 @@ public class ArrayOperators {
}
}
public enum Variable implements AggregationVariable {
public enum Variable implements Field {
THIS {
@Override
public String getName() {
return "$$this";
}
@Override
public String getTarget() {
return "$$this";
}
@Override
public boolean isAliased() {
return false;
}
@Override
public String toString() {
return getName();
@@ -1531,23 +1496,27 @@ public class ArrayOperators {
},
VALUE {
@Override
public String getName() {
return "$$value";
}
@Override
public String getTarget() {
return "$$value";
}
@Override
public boolean isAliased() {
return false;
}
@Override
public String toString() {
return getName();
}
};
@Override
public boolean isInternal() {
return true;
}
/**
* Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier.
* eg. {@code $$value.product}
@@ -1579,16 +1548,6 @@ public class ArrayOperators {
}
};
}
public static boolean isVariable(Field field) {
for (Variable var : values()) {
if (field.getTarget().startsWith(var.getTarget())) {
return true;
}
}
return false;
}
}
}
@@ -1696,7 +1655,7 @@ public class ArrayOperators {
private ZipBuilder(Object sourceArray) {
this.sourceArrays = new ArrayList<>();
this.sourceArrays = new ArrayList<Object>();
this.sourceArrays.add(sourceArray);
}
@@ -1713,14 +1672,14 @@ public class ArrayOperators {
Assert.notNull(arrays, "Arrays must not be null");
for (Object value : arrays) {
if (value instanceof String stringValue) {
sourceArrays.add(Fields.field(stringValue));
if (value instanceof String) {
sourceArrays.add(Fields.field((String) value));
} else {
sourceArrays.add(value);
}
}
return new Zip(Collections.singletonMap("inputs", sourceArrays));
return new Zip(Collections.<String, Object>singletonMap("inputs", sourceArrays));
}
}
}
@@ -1731,7 +1690,7 @@ public class ArrayOperators {
* @author Christoph Strobl
* @author Shashank Sharma
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
* @since 2.2
*/
public static class In extends AbstractAggregationExpression {
@@ -1820,7 +1779,7 @@ public class ArrayOperators {
*
* @author Christoph Strobl
* @see <a href=
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* "https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/">https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/</a>
* @since 2.1
*/
public static class ArrayToObject extends AbstractAggregationExpression {
@@ -2017,7 +1976,7 @@ public class ArrayOperators {
/**
* Set the order to put elements in.
*
*
* @param sort must not be {@literal null}.
* @return new instance of {@link SortArray}.
*/

View File

@@ -80,7 +80,7 @@ public class BucketOperation extends BucketOperationSupport<BucketOperation, Buc
super(bucketOperation);
this.boundaries = new ArrayList<>(boundaries);
this.boundaries = new ArrayList<Object>(boundaries);
this.defaultBucket = defaultBucket;
}
@@ -129,7 +129,7 @@ public class BucketOperation extends BucketOperationSupport<BucketOperation, Buc
Assert.notNull(boundaries, "Boundaries must not be null");
Assert.noNullElements(boundaries, "Boundaries must not contain null values");
List<Object> newBoundaries = new ArrayList<>(this.boundaries.size() + boundaries.length);
List<Object> newBoundaries = new ArrayList<Object>(this.boundaries.size() + boundaries.length);
newBoundaries.addAll(this.boundaries);
newBoundaries.addAll(Arrays.asList(boundaries));

View File

@@ -324,7 +324,7 @@ public abstract class BucketOperationSupport<T extends BucketOperationSupport<T,
Assert.hasText(operation, "Operation must not be empty or null");
Assert.notNull(value, "Values must not be null");
List<Object> objects = new ArrayList<>(values.length + 1);
List<Object> objects = new ArrayList<Object>(values.length + 1);
objects.add(value);
objects.addAll(Arrays.asList(values));
return apply(new OperationOutput(operation, objects));
@@ -350,8 +350,8 @@ public abstract class BucketOperationSupport<T extends BucketOperationSupport<T,
*/
public T as(String alias) {
if (value instanceof OperationOutput operationOutput) {
return this.operation.andOutput(operationOutput.withAlias(alias));
if (value instanceof OperationOutput) {
return this.operation.andOutput(((OperationOutput) this.value).withAlias(alias));
}
if (value instanceof Field) {
@@ -520,7 +520,7 @@ public abstract class BucketOperationSupport<T extends BucketOperationSupport<T,
Assert.notNull(values, "Values must not be null");
this.operation = operation;
this.values = new ArrayList<>(values);
this.values = new ArrayList<Object>(values);
}
private OperationOutput(Field field, OperationOutput operationOutput) {
@@ -540,18 +540,18 @@ public abstract class BucketOperationSupport<T extends BucketOperationSupport<T,
protected List<Object> getOperationArguments(AggregationOperationContext context) {
List<Object> result = new ArrayList<>(values != null ? values.size() : 1);
List<Object> result = new ArrayList<Object>(values != null ? values.size() : 1);
for (Object element : values) {
if (element instanceof Field field) {
result.add(context.getReference(field).toString());
} else if (element instanceof Fields fields) {
for (Field field : fields) {
if (element instanceof Field) {
result.add(context.getReference((Field) element).toString());
} else if (element instanceof Fields) {
for (Field field : (Fields) element) {
result.add(context.getReference(field).toString());
}
} else if (element instanceof AggregationExpression aggregationExpression) {
result.add(aggregationExpression.toDocument(context));
} else if (element instanceof AggregationExpression) {
result.add(((AggregationExpression) element).toDocument(context));
} else {
result.add(element);
}

View File

@@ -278,10 +278,10 @@ public class ConditionalOperators {
@Override
public Document toDocument(AggregationOperationContext context) {
List<Object> list = new ArrayList<>();
List<Object> list = new ArrayList<Object>();
if (condition instanceof Collection<?> collection) {
for (Object val : collection) {
if (condition instanceof Collection) {
for (Object val : ((Collection) this.condition)) {
list.add(mapCondition(val, context));
}
} else {
@@ -294,10 +294,10 @@ public class ConditionalOperators {
private Object mapCondition(Object condition, AggregationOperationContext context) {
if (condition instanceof Field field) {
return context.getReference(field).toString();
} else if (condition instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (condition instanceof Field) {
return context.getReference((Field) condition).toString();
} else if (condition instanceof AggregationExpression) {
return ((AggregationExpression) condition).toDocument(context);
} else {
return condition;
}
@@ -305,10 +305,10 @@ public class ConditionalOperators {
private Object resolve(Object value, AggregationOperationContext context) {
if (value instanceof Field field) {
return context.getReference(field).toString();
} else if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof Field) {
return context.getReference((Field) value).toString();
} else if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
} else if (value instanceof Document) {
return value;
}
@@ -482,7 +482,7 @@ public class ConditionalOperators {
public static Switch switchCases(List<CaseOperator> conditions) {
Assert.notNull(conditions, "Conditions must not be null");
return new Switch(Collections.singletonMap("branches", new ArrayList<>(conditions)));
return new Switch(Collections.<String, Object> singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
}
/**
@@ -529,10 +529,10 @@ public class ConditionalOperators {
Document dbo = new Document("case", when.toDocument(context));
if (then instanceof AggregationExpression aggregationExpression) {
dbo.put("then", aggregationExpression.toDocument(context));
} else if (then instanceof Field field) {
dbo.put("then", context.getReference(field).toString());
if (then instanceof AggregationExpression) {
dbo.put("then", ((AggregationExpression) then).toDocument(context));
} else if (then instanceof Field) {
dbo.put("then", context.getReference((Field) then).toString());
} else {
dbo.put("then", then);
}
@@ -629,8 +629,8 @@ public class ConditionalOperators {
return resolve(context, value);
}
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
return context.getMappedObject(new Document("$set", value)).get("$set");
@@ -642,13 +642,13 @@ public class ConditionalOperators {
return resolve(context, value);
}
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
if (value instanceof CriteriaDefinition criteriaDefinition) {
if (value instanceof CriteriaDefinition) {
Document mappedObject = context.getMappedObject(criteriaDefinition.getCriteriaObject());
Document mappedObject = context.getMappedObject(((CriteriaDefinition) value).getCriteriaObject());
List<Object> clauses = getClauses(context, mappedObject);
return clauses.size() == 1 ? clauses.get(0) : clauses;
}
@@ -659,7 +659,7 @@ public class ConditionalOperators {
private List<Object> getClauses(AggregationOperationContext context, Document mappedObject) {
List<Object> clauses = new ArrayList<>();
List<Object> clauses = new ArrayList<Object>();
for (String key : mappedObject.keySet()) {
@@ -672,20 +672,23 @@ public class ConditionalOperators {
private List<Object> getClauses(AggregationOperationContext context, String key, Object predicate) {
List<Object> clauses = new ArrayList<>();
List<Object> clauses = new ArrayList<Object>();
if (predicate instanceof List<?> predicates) {
if (predicate instanceof List) {
List<Object> args = new ArrayList<>(predicates.size());
List<?> predicates = (List<?>) predicate;
List<Object> args = new ArrayList<Object>(predicates.size());
for (Object clause : predicates) {
if (clause instanceof Document document) {
args.addAll(getClauses(context, document));
for (Object clause : (List<?>) predicate) {
if (clause instanceof Document) {
args.addAll(getClauses(context, (Document) clause));
}
}
clauses.add(new Document(key, args));
} else if (predicate instanceof Document nested) {
} else if (predicate instanceof Document) {
Document nested = (Document) predicate;
for (String s : nested.keySet()) {
@@ -693,14 +696,14 @@ public class ConditionalOperators {
continue;
}
List<Object> args = new ArrayList<>(2);
List<Object> args = new ArrayList<Object>(2);
args.add("$" + key);
args.add(nested.get(s));
clauses.add(new Document(s, args));
}
} else if (!isKeyword(key)) {
List<Object> args = new ArrayList<>(2);
List<Object> args = new ArrayList<Object>(2);
args.add("$" + key);
args.add(predicate);
clauses.add(new Document("$eq", args));
@@ -721,8 +724,8 @@ public class ConditionalOperators {
private Object resolve(AggregationOperationContext context, Object value) {
if (value instanceof Document document) {
return context.getMappedObject(document);
if (value instanceof Document) {
return context.getMappedObject((Document) value);
}
return context.getReference((Field) value).toString();

View File

@@ -159,7 +159,7 @@ public class DateOperators {
* +/-[hh], e.g. "+03"</td>
* </tr>
* </table>
* <strong>NOTE:</strong> Support for timezones in aggregations Requires MongoDB 3.6 or later.
* <strong>NOTE: </strong>Support for timezones in aggregations Requires MongoDB 3.6 or later.
*
* @author Christoph Strobl
* @author Mark Paluch
@@ -985,8 +985,8 @@ public class DateOperators {
java.util.Map<String, Object> args;
if (source instanceof Map map) {
args = new LinkedHashMap<>(map);
if (source instanceof Map) {
args = new LinkedHashMap<>((Map) source);
} else {
args = new LinkedHashMap<>(2);
args.put("date", source);
@@ -1877,12 +1877,12 @@ public class DateOperators {
java.util.Map<String, Object> clone = new LinkedHashMap<>(argumentMap());
if (value instanceof Timezone timezone) {
if (value instanceof Timezone) {
if (ObjectUtils.nullSafeEquals(value, Timezone.none())) {
clone.remove("timezone");
} else {
clone.put("timezone", timezone.value);
clone.put("timezone", ((Timezone) value).value);
}
} else {
clone.put(key, value);

View File

@@ -84,21 +84,21 @@ abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOp
return exposedFields;
}
private ExposedFields add(Object fieldValue) {
private ExposedFields add(Object field) {
if (fieldValue instanceof Field field) {
return exposedFields.and(new ExposedField(field, true));
if (field instanceof Field) {
return exposedFields.and(new ExposedField((Field) field, true));
}
if (fieldValue instanceof String fieldName) {
return exposedFields.and(new ExposedField(Fields.field(fieldName), true));
if (field instanceof String) {
return exposedFields.and(new ExposedField(Fields.field((String) field), true));
}
throw new IllegalArgumentException(String.format("Expected %s to be a field/property", fieldValue));
throw new IllegalArgumentException(String.format("Expected %s to be a field/property", field));
}
private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {
String field = entry.getKey() instanceof String key ? context.getReference(key).getRaw()
String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
: context.getReference((Field) entry.getKey()).getRaw();
Object value = computeValue(entry.getValue(), context);
@@ -108,20 +108,20 @@ abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOp
private static Object computeValue(Object value, AggregationOperationContext context) {
if (value instanceof Field field) {
return context.getReference(field).toString();
if (value instanceof Field) {
return context.getReference((Field) value).toString();
}
if (value instanceof ExpressionProjection expressionProjection) {
return expressionProjection.toExpression(context);
if (value instanceof ExpressionProjection) {
return ((ExpressionProjection) value).toExpression(context);
}
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
if (value instanceof Collection<?> collection) {
return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
if (value instanceof Collection) {
return ((Collection<?>) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
}
return value;

View File

@@ -393,11 +393,13 @@ public final class ExposedFields implements Iterable<ExposedField> {
return true;
}
if (!(obj instanceof DirectFieldReference fieldReference)) {
if (!(obj instanceof DirectFieldReference)) {
return false;
}
return this.field.equals(fieldReference.field);
DirectFieldReference that = (DirectFieldReference) obj;
return this.field.equals(that.field);
}
@Override
@@ -458,11 +460,12 @@ public final class ExposedFields implements Iterable<ExposedField> {
return true;
}
if (!(obj instanceof ExpressionFieldReference fieldReference)) {
if (!(obj instanceof ExpressionFieldReference)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.delegate, fieldReference.delegate);
ExpressionFieldReference that = (ExpressionFieldReference) obj;
return ObjectUtils.nullSafeEquals(this.delegate, that.delegate);
}
@Override

View File

@@ -96,15 +96,6 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
return exposedField;
}
if (rootContext instanceof RelaxedTypeBasedAggregationOperationContext) {
if (field != null) {
return new DirectFieldReference(new ExposedField(field, true));
}
return new DirectFieldReference(new ExposedField(name, true));
}
throw new IllegalArgumentException(String.format("Invalid reference '%s'", name));
}

View File

@@ -67,7 +67,7 @@ public final class Fields implements Iterable<Field> {
Assert.notNull(names, "Field names must not be null");
List<Field> fields = new ArrayList<>();
List<Field> fields = new ArrayList<Field>();
for (String name : names) {
fields.add(field(name));
@@ -114,7 +114,7 @@ public final class Fields implements Iterable<Field> {
private static List<Field> verify(List<Field> fields) {
Map<String, Field> reference = new HashMap<>();
Map<String, Field> reference = new HashMap<String, Field>();
for (Field field : fields) {
@@ -133,7 +133,7 @@ public final class Fields implements Iterable<Field> {
private Fields(Fields existing, Field tail) {
this.fields = new ArrayList<>(existing.fields.size() + 1);
this.fields = new ArrayList<Field>(existing.fields.size() + 1);
this.fields.addAll(existing.fields);
this.fields.add(tail);
}
@@ -245,7 +245,7 @@ public final class Fields implements Iterable<Field> {
private static String cleanUp(String source) {
if (AggregationVariable.isVariable(source)) {
if (SystemVariable.isReferingToSystemVariable(source)) {
return source;
}
@@ -253,12 +253,10 @@ public final class Fields implements Iterable<Field> {
return dollarIndex == -1 ? source : source.substring(dollarIndex + 1);
}
@Override
public String getName() {
return name;
}
@Override
public String getTarget() {
if (isLocalVar() || pointsToDBRefId()) {
@@ -310,11 +308,13 @@ public final class Fields implements Iterable<Field> {
return true;
}
if (!(obj instanceof AggregationField field)) {
if (!(obj instanceof AggregationField)) {
return false;
}
return this.name.equals(field.name) && ObjectUtils.nullSafeEquals(this.target, field.target);
AggregationField that = (AggregationField) obj;
return this.name.equals(that.name) && ObjectUtils.nullSafeEquals(this.target, that.target);
}
@Override

View File

@@ -84,14 +84,14 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
graphLookup.put("from", from);
List<Object> mappedStartWith = new ArrayList<>(startWith.size());
List<Object> mappedStartWith = new ArrayList<Object>(startWith.size());
for (Object startWithElement : startWith) {
if (startWithElement instanceof AggregationExpression aggregationExpression) {
mappedStartWith.add(aggregationExpression.toDocument(context));
} else if (startWithElement instanceof Field field) {
mappedStartWith.add(context.getReference(field).toString());
if (startWithElement instanceof AggregationExpression) {
mappedStartWith.add(((AggregationExpression) startWithElement).toDocument(context));
} else if (startWithElement instanceof Field) {
mappedStartWith.add(context.getReference((Field) startWithElement).toString());
} else {
mappedStartWith.add(startWithElement);
}
@@ -237,7 +237,7 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
Assert.notNull(fieldReferences, "FieldReferences must not be null");
Assert.noNullElements(fieldReferences, "FieldReferences must not contain null elements");
List<Object> fields = new ArrayList<>(fieldReferences.length);
List<Object> fields = new ArrayList<Object>(fieldReferences.length);
for (String fieldReference : fieldReferences) {
fields.add(Fields.field(fieldReference));
@@ -269,14 +269,14 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
private List<Object> verifyAndPotentiallyTransformStartsWithTypes(Object... expressions) {
List<Object> expressionsToUse = new ArrayList<>(expressions.length);
List<Object> expressionsToUse = new ArrayList<Object>(expressions.length);
for (Object expression : expressions) {
assertStartWithType(expression);
if (expression instanceof String stringValue) {
expressionsToUse.add(Fields.field(stringValue));
if (expression instanceof String) {
expressionsToUse.add(Fields.field((String) expression));
} else {
expressionsToUse.add(expression);
}
@@ -333,7 +333,7 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
String connectTo) {
this.from = from;
this.startWith = new ArrayList<>(startWith);
this.startWith = new ArrayList<Object>(startWith);
this.connectFrom = Fields.field(connectFrom);
this.connectTo = Fields.field(connectTo);
}

View File

@@ -514,8 +514,8 @@ public class GroupOperation implements FieldsExposingAggregationOperation {
if (reference == null) {
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
return value;

View File

@@ -15,7 +15,6 @@
*/
package org.springframework.data.mongodb.core.aggregation;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
/**
@@ -23,7 +22,6 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
* {@link AggregationOperationContext}.
*
* @author Mark Paluch
* @author Christoph Strobl
* @since 1.9
*/
class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAggregationOperationContext {
@@ -45,11 +43,6 @@ class InheritingExposedFieldsAggregationOperationContext extends ExposedFieldsAg
this.previousContext = previousContext;
}
@Override
public Document getMappedObject(Document document) {
return previousContext.getMappedObject(document);
}
@Override
protected FieldReference resolveExposedField(Field field, String name) {

View File

@@ -15,44 +15,28 @@
*/
package org.springframework.data.mongodb.core.aggregation;
import java.util.function.Supplier;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let.ExpressionVariable;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
/**
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the builder provided via
* {@link #newLookup()} instead of creating instances of this class directly.
* Encapsulates the aggregation framework {@code $lookup}-operation. We recommend to use the static factory method
* {@link Aggregation#lookup(String, String, String, String)} instead of creating instances of this class directly.
*
* @author Alessio Fachechi
* @author Christoph Strobl
* @author Mark Paluch
* @author Sangyong Choi
* @since 1.9
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/lookup/">MongoDB Aggregation Framework:
* $lookup</a>
*/
public class LookupOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {
private final String from;
@Nullable //
private final Field from;
private final Field localField;
@Nullable //
private final Field foreignField;
@Nullable //
private final Let let;
@Nullable //
private final AggregationPipeline pipeline;
private final ExposedField as;
/**
@@ -64,55 +48,16 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
* @param as must not be {@literal null}.
*/
public LookupOperation(Field from, Field localField, Field foreignField, Field as) {
this(((Supplier<String>) () -> {
Assert.notNull(from, "From must not be null");
return from.getTarget();
}).get(), localField, foreignField, null, null, as);
}
/**
* Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline
* pipeline}.
*
* @param from must not be {@literal null}.
* @param let must not be {@literal null}.
* @param as must not be {@literal null}.
* @since 4.1
*/
public LookupOperation(String from, @Nullable Let let, AggregationPipeline pipeline, Field as) {
this(from, null, null, let, pipeline, as);
}
/**
* Creates a new {@link LookupOperation} for the given combination of {@link Field}s and {@link AggregationPipeline
* pipeline}.
*
* @param from must not be {@literal null}.
* @param localField can be {@literal null} if {@literal pipeline} is present.
* @param foreignField can be {@literal null} if {@literal pipeline} is present.
* @param let can be {@literal null} if {@literal localField} and {@literal foreignField} are present.
* @param as must not be {@literal null}.
* @since 4.1
*/
public LookupOperation(String from, @Nullable Field localField, @Nullable Field foreignField, @Nullable Let let,
@Nullable AggregationPipeline pipeline, Field as) {
Assert.notNull(from, "From must not be null");
if (pipeline == null) {
Assert.notNull(localField, "LocalField must not be null");
Assert.notNull(foreignField, "ForeignField must not be null");
} else if (localField == null && foreignField == null) {
Assert.notNull(pipeline, "Pipeline must not be null");
}
Assert.notNull(localField, "LocalField must not be null");
Assert.notNull(foreignField, "ForeignField must not be null");
Assert.notNull(as, "As must not be null");
this.from = from;
this.localField = localField;
this.foreignField = foreignField;
this.as = new ExposedField(as, true);
this.let = let;
this.pipeline = pipeline;
}
@Override
@@ -125,20 +70,9 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
Document lookupObject = new Document();
lookupObject.append("from", from);
if (localField != null) {
lookupObject.append("localField", localField.getTarget());
}
if (foreignField != null) {
lookupObject.append("foreignField", foreignField.getTarget());
}
if (let != null) {
lookupObject.append("let", let.toDocument(context).get("$let", Document.class).get("vars"));
}
if (pipeline != null) {
lookupObject.append("pipeline", pipeline.toDocuments(context));
}
lookupObject.append("from", from.getTarget());
lookupObject.append("localField", localField.getTarget());
lookupObject.append("foreignField", foreignField.getTarget());
lookupObject.append("as", as.getTarget());
return new Document(getOperator(), lookupObject);
@@ -167,7 +101,7 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
LocalFieldBuilder from(String name);
}
public static interface LocalFieldBuilder extends PipelineBuilder {
public static interface LocalFieldBuilder {
/**
* @param name the field from the documents input to the {@code $lookup} stage, must not be {@literal null} or
@@ -186,67 +120,7 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
AsBuilder foreignField(String name);
}
/**
* @since 4.1
* @author Christoph Strobl
*/
public interface LetBuilder {
/**
		 * Specifies {@link Let#getVariableNames() variables} that can be used in the
* {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}.
*
* @param let must not be {@literal null}.
* @return never {@literal null}.
* @see PipelineBuilder
*/
PipelineBuilder let(Let let);
/**
		 * Specifies {@link Let#getVariableNames() variables} that can be used in the
* {@link PipelineBuilder#pipeline(AggregationOperation...) pipeline stages}.
*
* @param variables must not be {@literal null}.
* @return never {@literal null}.
* @see PipelineBuilder
*/
default PipelineBuilder let(ExpressionVariable... variables) {
return let(Let.just(variables));
}
}
/**
* @since 4.1
* @author Christoph Strobl
*/
public interface PipelineBuilder extends LetBuilder {
/**
* Specifies the {@link AggregationPipeline pipeline} that determines the resulting documents.
*
* @param pipeline must not be {@literal null}.
* @return never {@literal null}.
*/
AsBuilder pipeline(AggregationPipeline pipeline);
/**
* Specifies the {@link AggregationPipeline#getOperations() stages} that determine the resulting documents.
*
* @param stages must not be {@literal null} can be empty.
* @return never {@literal null}.
*/
default AsBuilder pipeline(AggregationOperation... stages) {
return pipeline(AggregationPipeline.of(stages));
}
/**
* @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
* @return new instance of {@link LookupOperation}.
*/
LookupOperation as(String name);
}
public static interface AsBuilder extends PipelineBuilder {
public static interface AsBuilder {
/**
* @param name the name of the new array field to add to the input documents, must not be {@literal null} or empty.
@@ -264,12 +138,10 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
public static final class LookupOperationBuilder
implements FromBuilder, LocalFieldBuilder, ForeignFieldBuilder, AsBuilder {
private @Nullable String from;
private @Nullable Field from;
private @Nullable Field localField;
private @Nullable Field foreignField;
private @Nullable ExposedField as;
private @Nullable Let let;
private @Nullable AggregationPipeline pipeline;
/**
* Creates new builder for {@link LookupOperation}.
@@ -284,10 +156,18 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
public LocalFieldBuilder from(String name) {
Assert.hasText(name, "'From' must not be null or empty");
from = name;
from = Fields.field(name);
return this;
}
@Override
public LookupOperation as(String name) {
Assert.hasText(name, "'As' must not be null or empty");
as = new ExposedField(Fields.field(name), true);
return new LookupOperation(from, localField, foreignField, as);
}
@Override
public AsBuilder foreignField(String name) {
@@ -303,29 +183,5 @@ public class LookupOperation implements FieldsExposingAggregationOperation, Inhe
localField = Fields.field(name);
return this;
}
@Override
public PipelineBuilder let(Let let) {
Assert.notNull(let, "Let must not be null");
this.let = let;
return this;
}
@Override
public AsBuilder pipeline(AggregationPipeline pipeline) {
Assert.notNull(pipeline, "Pipeline must not be null");
this.pipeline = pipeline;
return this;
}
@Override
public LookupOperation as(String name) {
Assert.hasText(name, "'As' must not be null or empty");
as = new ExposedField(Fields.field(name), true);
return new LookupOperation(from, localField, foreignField, let, pipeline, as);
}
}
}

View File

@@ -339,8 +339,8 @@ public class MergeOperation implements FieldsExposingAggregationOperation, Inher
Document toDocument(AggregationOperationContext context) {
if (value instanceof Aggregation aggregation) {
return new Document("whenMatched", aggregation.toPipeline(context));
if (value instanceof Aggregation) {
return new Document("whenMatched", ((Aggregation) value).toPipeline(context));
}
return new Document("whenMatched", value);

View File

@@ -245,8 +245,12 @@ public class ObjectOperators {
@SuppressWarnings("unchecked")
private Object potentiallyExtractSingleValue(Object value) {
if (value instanceof Collection<?> collection && collection.size() == 1) {
return collection.iterator().next();
if (value instanceof Collection) {
Collection<Object> collection = ((Collection<Object>) value);
if (collection.size() == 1) {
return collection.iterator().next();
}
}
return value;
}

View File

@@ -115,8 +115,8 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati
List<Object> prefixed = new ArrayList<>(sourceCollection.size());
for (Object o : sourceCollection) {
if (o instanceof Document document) {
prefixed.add(doPrefix(document));
if (o instanceof Document) {
prefixed.add(doPrefix((Document) o));
} else {
prefixed.add(o);
}

View File

@@ -207,10 +207,10 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
for (Object value : values) {
if (value instanceof Field field) {
builder.and(field);
} else if (value instanceof AggregationExpression aggregationExpression) {
builder.and(aggregationExpression);
if (value instanceof Field) {
builder.and((Field) value);
} else if (value instanceof AggregationExpression) {
builder.and((AggregationExpression) value);
} else {
builder.and(value);
}
@@ -330,7 +330,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
*
* @param expression must not be {@literal null}.
* @param operation must not be {@literal null}.
* @param parameters parameters must not be {@literal null}.
* @param parameters
*/
public ExpressionProjectionOperationBuilder(String expression, ProjectionOperation operation, Object[] parameters) {
@@ -347,7 +347,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
@Override
protected List<Object> getOperationArguments(AggregationOperationContext context) {
List<Object> result = new ArrayList<>(values.length + 1);
List<Object> result = new ArrayList<Object>(values.length + 1);
result.add(ExpressionProjection.toMongoExpression(context,
ExpressionProjectionOperationBuilder.this.expression, ExpressionProjectionOperationBuilder.this.params));
result.addAll(Arrays.asList(values));
@@ -1455,19 +1455,19 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
protected List<Object> getOperationArguments(AggregationOperationContext context) {
List<Object> result = new ArrayList<>(values.size());
List<Object> result = new ArrayList<Object>(values.size());
result.add(context.getReference(getField()).toString());
for (Object element : values) {
if (element instanceof Field field) {
result.add(context.getReference(field).toString());
} else if (element instanceof Fields fields) {
for (Field field : fields) {
if (element instanceof Field) {
result.add(context.getReference((Field) element).toString());
} else if (element instanceof Fields) {
for (Field field : (Fields) element) {
result.add(context.getReference(field).toString());
}
} else if (element instanceof AggregationExpression aggregationExpression) {
result.add(aggregationExpression.toDocument(context));
} else if (element instanceof AggregationExpression) {
result.add(((AggregationExpression) element).toDocument(context));
} else {
result.add(element);
}
@@ -1734,29 +1734,6 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
}
}
/**
* A {@link Projection} including all top level fields of the given target type mapped to include potentially
* deviating field names.
*
* @since 2.2
* @author Christoph Strobl
*/
static class FilterProjection extends Projection {
public static String FILTER_ELEMENT = "filterElement";
private final Object value;
FilterProjection(String fieldReference, Object value) {
super(Fields.field(FILTER_ELEMENT + "." + fieldReference));
this.value = value;
}
@Override
public Document toDocument(AggregationOperationContext context) {
return new Document(getExposedField().getName(), value);
}
}
/**
* Builder for {@code array} projections.
*
@@ -1852,16 +1829,20 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
private Object toArrayEntry(Object projection, AggregationOperationContext ctx) {
if (projection instanceof Field field) {
return ctx.getReference(field).toString();
if (projection instanceof Field) {
return ctx.getReference((Field) projection).toString();
}
if (projection instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(ctx);
if (projection instanceof AggregationExpression) {
return ((AggregationExpression) projection).toDocument(ctx);
}
if (projection instanceof FieldProjection fieldProjection) {
return ctx.getReference(fieldProjection.getExposedField().getTarget()).toString();
if (projection instanceof FieldProjection) {
return ctx.getReference(((FieldProjection) projection).getExposedField().getTarget()).toString();
}
if (projection instanceof Projection) {
((Projection) projection).toDocument(ctx);
}
return projection;

View File

@@ -226,14 +226,14 @@ public class RedactOperation implements AggregationOperation {
private ThenBuilder when() {
if (when instanceof CriteriaDefinition criteriaDefinition) {
return ConditionalOperators.Cond.when(criteriaDefinition);
if (when instanceof CriteriaDefinition) {
return ConditionalOperators.Cond.when((CriteriaDefinition) when);
}
if (when instanceof AggregationExpression aggregationExpression) {
return ConditionalOperators.Cond.when(aggregationExpression);
if (when instanceof AggregationExpression) {
return ConditionalOperators.Cond.when((AggregationExpression) when);
}
if (when instanceof Document document) {
return ConditionalOperators.Cond.when(document);
if (when instanceof Document) {
return ConditionalOperators.Cond.when((Document) when);
}
throw new IllegalArgumentException(String.format(

View File

@@ -21,6 +21,7 @@ import java.util.Collections;
import java.util.List;
import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
import org.springframework.expression.spel.ast.Projection;
import org.springframework.util.Assert;
@@ -248,9 +249,9 @@ public class ReplaceRootOperation implements FieldsExposingAggregationOperation
public ReplaceRootDocumentOperation as(String fieldName) {
if (value instanceof AggregationExpression aggregationExpression) {
if (value instanceof AggregationExpression) {
return new ReplaceRootDocumentOperation(currentOperation,
ReplacementDocument.forExpression(fieldName, aggregationExpression));
ReplacementDocument.forExpression(fieldName, (AggregationExpression) value));
}
return new ReplaceRootDocumentOperation(currentOperation, ReplacementDocument.forSingleValue(fieldName, value));
@@ -430,7 +431,6 @@ public class ReplaceRootOperation implements FieldsExposingAggregationOperation
* @param context will never be {@literal null}.
* @return never {@literal null}.
*/
@Override
Document toDocument(AggregationOperationContext context);
}

View File

@@ -62,23 +62,23 @@ public class ReplaceWithOperation extends ReplaceRootOperation {
public static ReplaceWithOperation replaceWithValueOf(Object value) {
Assert.notNull(value, "Value must not be null");
return new ReplaceWithOperation(ctx -> {
return new ReplaceWithOperation((ctx) -> {
Object target = value instanceof String stringValue ? Fields.field(stringValue) : value;
Object target = value instanceof String ? Fields.field((String) value) : value;
return computeValue(target, ctx);
});
}
private static Object computeValue(Object value, AggregationOperationContext context) {
if (value instanceof Field field) {
return context.getReference(field).toString();
if (value instanceof Field) {
return context.getReference((Field) value).toString();
}
if (value instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (value instanceof AggregationExpression) {
return ((AggregationExpression) value).toDocument(context);
}
if (value instanceof Collection<?> collection) {
return collection.stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
if (value instanceof Collection) {
return ((Collection) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
}
return value;

View File

@@ -140,7 +140,7 @@ public class SetOperation extends DocumentEnhancingOperation {
@Override
public SetOperation toValueOf(Object value) {
valueMap.put(field, value instanceof String stringValue ? Fields.fields(stringValue) : value);
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
return FieldAppender.this.build();
}

View File

@@ -78,10 +78,10 @@ public class SetWindowFieldsOperation
Document $setWindowFields = new Document();
if (partitionBy != null) {
if (partitionBy instanceof AggregationExpression aggregationExpression) {
$setWindowFields.append("partitionBy", aggregationExpression.toDocument(context));
} else if (partitionBy instanceof Field field) {
$setWindowFields.append("partitionBy", context.getReference(field).toString());
if (partitionBy instanceof AggregationExpression) {
$setWindowFields.append("partitionBy", ((AggregationExpression) partitionBy).toDocument(context));
} else if (partitionBy instanceof Field) {
$setWindowFields.append("partitionBy", context.getReference((Field) partitionBy).toString());
} else {
$setWindowFields.append("partitionBy", partitionBy);
}

View File

@@ -259,7 +259,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
private Document createOperationObjectAndAddToPreviousArgumentsIfNecessary(
AggregationExpressionTransformationContext<OperatorNode> context, OperatorNode currentNode) {
Document nextDocument = new Document(currentNode.getMongoOperator(), new ArrayList<>());
Document nextDocument = new Document(currentNode.getMongoOperator(), new ArrayList<Object>());
if (!context.hasPreviousOperation()) {
return nextDocument;
@@ -282,7 +282,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
@Nullable Object leftResult) {
Object result = leftResult instanceof Number ? leftResult
: new Document("$multiply", Arrays.asList(Integer.valueOf(-1), leftResult));
: new Document("$multiply", Arrays.<Object> asList(Integer.valueOf(-1), leftResult));
if (leftResult != null && context.hasPreviousOperation()) {
context.addToPreviousOperation(result);
@@ -453,7 +453,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
}
else {
List<Object> argList = new ArrayList<>();
List<Object> argList = new ArrayList<Object>();
for (ExpressionNode childNode : node) {
argList.add(transform(childNode, context));
@@ -516,7 +516,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
protected Object convert(AggregationExpressionTransformationContext<NotOperatorNode> context) {
NotOperatorNode node = context.getCurrentNode();
List<Object> args = new ArrayList<>();
List<Object> args = new ArrayList<Object>();
for (ExpressionNode childNode : node) {
args.add(transform(childNode, context));

View File

@@ -24,7 +24,7 @@ import org.springframework.lang.Nullable;
* @author Christoph Strobl
* @see <a href="https://docs.mongodb.com/manual/reference/aggregation-variables">Aggregation Variables</a>.
*/
public enum SystemVariable implements AggregationVariable {
public enum SystemVariable {
/**
* Variable for the current datetime.
@@ -82,6 +82,8 @@ public enum SystemVariable implements AggregationVariable {
*/
SEARCH_META;
private static final String PREFIX = "$$";
/**
* Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false}
* otherwise.
@@ -91,12 +93,13 @@ public enum SystemVariable implements AggregationVariable {
*/
public static boolean isReferingToSystemVariable(@Nullable String fieldRef) {
String candidate = variableNameFrom(fieldRef);
if (candidate == null) {
if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) {
return false;
}
candidate = candidate.startsWith(PREFIX) ? candidate.substring(2) : candidate;
int indexOfFirstDot = fieldRef.indexOf('.');
String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? fieldRef.length() : indexOfFirstDot);
for (SystemVariable value : values()) {
if (value.name().equals(candidate)) {
return true;
@@ -110,20 +113,4 @@ public enum SystemVariable implements AggregationVariable {
public String toString() {
return PREFIX.concat(name());
}
@Override
public String getTarget() {
return toString();
}
@Nullable
static String variableNameFrom(@Nullable String fieldRef) {
if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) {
return null;
}
int indexOfFirstDot = fieldRef.indexOf('.');
return indexOfFirstDot == -1 ? fieldRef : fieldRef.substring(2, indexOfFirstDot);
}
}

View File

@@ -133,7 +133,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
protected FieldReference getReferenceFor(Field field) {
if(entity.getNullable() == null || AggregationVariable.isVariable(field)) {
if(entity.getNullable() == null) {
return new DirectFieldReference(new ExposedField(field, true));
}

View File

@@ -138,12 +138,12 @@ public class UnionWithOperation implements AggregationOperation {
private AggregationOperationContext computeContext(AggregationOperationContext source) {
if (source instanceof TypeBasedAggregationOperationContext aggregationOperationContext) {
return aggregationOperationContext.continueOnMissingFieldReference(domainType != null ? domainType : Object.class);
if (source instanceof TypeBasedAggregationOperationContext) {
return ((TypeBasedAggregationOperationContext) source).continueOnMissingFieldReference(domainType != null ? domainType : Object.class);
}
if (source instanceof ExposedFieldsAggregationOperationContext aggregationOperationContext) {
return computeContext(aggregationOperationContext.getRootContext());
if (source instanceof ExposedFieldsAggregationOperationContext) {
return computeContext(((ExposedFieldsAggregationOperationContext) source).getRootContext());
}
return source;

View File

@@ -96,8 +96,8 @@ public class UnsetOperation implements InheritsFieldsAggregationOperation {
List<String> fieldNames = new ArrayList<>(fields.size());
for (Object it : fields) {
if (it instanceof Field field) {
fieldNames.add(field.getName());
if (it instanceof Field) {
fieldNames.add(((Field) it).getName());
} else {
fieldNames.add(it.toString());
}
@@ -123,16 +123,16 @@ public class UnsetOperation implements InheritsFieldsAggregationOperation {
private Object computeFieldName(Object field, AggregationOperationContext context) {
if (field instanceof Field fieldObject) {
return context.getReference(fieldObject).getRaw();
if (field instanceof Field) {
return context.getReference((Field) field).getRaw();
}
if (field instanceof AggregationExpression aggregationExpression) {
return aggregationExpression.toDocument(context);
if (field instanceof AggregationExpression) {
return ((AggregationExpression) field).toDocument(context);
}
if (field instanceof String stringValue) {
return context.getReference(stringValue).getRaw();
if (field instanceof String) {
return context.getReference((String) field).getRaw();
}
return field;

View File

@@ -16,6 +16,7 @@
package org.springframework.data.mongodb.core.aggregation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -174,8 +175,8 @@ public class VariableOperators {
exposedFields, context);
Document input;
if (sourceArray instanceof Field field) {
input = new Document("input", context.getReference(field).toString());
if (sourceArray instanceof Field) {
input = new Document("input", context.getReference((Field) sourceArray).toString());
} else {
input = new Document("input", ((AggregationExpression) sourceArray).toDocument(context));
}
@@ -223,41 +224,28 @@ public class VariableOperators {
public static class Let implements AggregationExpression {
private final List<ExpressionVariable> vars;
@Nullable //
private final AggregationExpression expression;
private Let(List<ExpressionVariable> vars, @Nullable AggregationExpression expression) {
private Let(List<ExpressionVariable> vars, AggregationExpression expression) {
this.vars = vars;
this.expression = expression;
}
/**
* Create a new {@link Let} holding just the given {@literal variables}.
*
* @param variables must not be {@literal null}.
* @return new instance of {@link Let}.
* @since 4.1
*/
public static Let just(ExpressionVariable... variables) {
return new Let(List.of(variables), null);
}
/**
* Start creating new {@link Let} by defining the variables for {@code $vars}.
*
* @param variables must not be {@literal null}.
* @return
*/
public static LetBuilder define(Collection<ExpressionVariable> variables) {
public static LetBuilder define(final Collection<ExpressionVariable> variables) {
Assert.notNull(variables, "Variables must not be null");
return new LetBuilder() {
@Override
public Let andApply(AggregationExpression expression) {
public Let andApply(final AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new Let(new ArrayList<ExpressionVariable>(variables), expression);
@@ -271,10 +259,19 @@ public class VariableOperators {
* @param variables must not be {@literal null}.
* @return
*/
public static LetBuilder define(ExpressionVariable... variables) {
public static LetBuilder define(final ExpressionVariable... variables) {
Assert.notNull(variables, "Variables must not be null");
return define(List.of(variables));
return new LetBuilder() {
@Override
public Let andApply(final AggregationExpression expression) {
Assert.notNull(expression, "Expression must not be null");
return new Let(Arrays.asList(variables), expression);
}
};
}
public interface LetBuilder {
@@ -286,11 +283,10 @@ public class VariableOperators {
* @return
*/
Let andApply(AggregationExpression expression);
}
@Override
public Document toDocument(AggregationOperationContext context) {
public Document toDocument(final AggregationOperationContext context) {
return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context);
}
@@ -316,22 +312,16 @@ public class VariableOperators {
}
letExpression.put("vars", mappedVars);
if (expression != null) {
letExpression.put("in", getMappedIn(operationContext));
}
letExpression.put("in", getMappedIn(operationContext));
return new Document("$let", letExpression);
}
private Document getMappedVariable(ExpressionVariable var, AggregationOperationContext context) {
if (var.expression instanceof AggregationExpression expression) {
return new Document(var.variableName, expression.toDocument(context));
}
if (var.expression instanceof Field field) {
return new Document(var.variableName, context.getReference(field).toString());
}
return new Document(var.variableName, var.expression);
return new Document(var.variableName,
var.expression instanceof AggregationExpression ? ((AggregationExpression) var.expression).toDocument(context)
: var.expression);
}
private Object getMappedIn(AggregationOperationContext context) {
@@ -383,10 +373,6 @@ public class VariableOperators {
return new ExpressionVariable(variableName, expression);
}
public ExpressionVariable forField(String fieldRef) {
return new ExpressionVariable(variableName, Fields.field(fieldRef));
}
/**
* Create a new {@link ExpressionVariable} with current name and given {@literal expressionObject}.
*

View File

@@ -177,10 +177,10 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M
return Alias.NONE;
}
if (source instanceof Document document) {
return Alias.ofNullable(document.get(typeKey));
} else if (source instanceof DBObject dbObject) {
return Alias.ofNullable(dbObject.get(typeKey));
if (source instanceof Document) {
return Alias.ofNullable(((Document) source).get(typeKey));
} else if (source instanceof DBObject) {
return Alias.ofNullable(((DBObject) source).get(typeKey));
}
throw new IllegalArgumentException("Cannot read alias from " + source.getClass());
@@ -190,10 +190,10 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M
if (typeKey != null) {
if (sink instanceof Document document) {
document.put(typeKey, alias);
} else if (sink instanceof DBObject dbObject) {
dbObject.put(typeKey, alias);
if (sink instanceof Document) {
((Document) sink).put(typeKey, alias);
} else if (sink instanceof DBObject) {
((DBObject) sink).put(typeKey, alias);
}
}
}

View File

@@ -108,8 +108,8 @@ public class DefaultReferenceResolver implements ReferenceResolver {
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) {
return proxyFactory.createLazyLoadingProxy(property,
it -> referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader),
source instanceof DocumentReferenceSource documentSource ? documentSource.getTargetSource() : source);
return proxyFactory.createLazyLoadingProxy(property, it -> {
return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader);
}, source instanceof DocumentReferenceSource ? ((DocumentReferenceSource)source).getTargetSource() : source);
}
}

View File

@@ -169,8 +169,8 @@ class DocumentAccessor {
Object existing = BsonUtils.asMap(source).get(key);
if (existing instanceof Document document) {
return document;
if (existing instanceof Document) {
return (Document) existing;
}
Document nested = new Document();

View File

@@ -73,8 +73,8 @@ class DocumentPointerFactory {
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
MongoPersistentProperty property, Object value, Class<?> typeHint) {
if (value instanceof LazyLoadingProxy proxy) {
return proxy::getSource;
if (value instanceof LazyLoadingProxy) {
return () -> ((LazyLoadingProxy) value).getSource();
}
if (conversionService.canConvert(typeHint, DocumentPointer.class)) {
@@ -94,8 +94,8 @@ class DocumentPointerFactory {
return () -> conversionService.convert(idValue, idProperty.getFieldType());
}
if (idValue instanceof String stringValue && ObjectId.isValid((String) idValue)) {
return () -> new ObjectId(stringValue);
if (idValue instanceof String && ObjectId.isValid((String) idValue)) {
return () -> new ObjectId((String) idValue);
}
return () -> idValue;
@@ -210,13 +210,13 @@ class DocumentPointerFactory {
lookup, entry.getKey()));
}
if (entry.getValue() instanceof Document document) {
if (entry.getValue() instanceof Document) {
MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey());
if (persistentProperty != null && persistentProperty.isEntity()) {
MongoPersistentEntity<?> nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType());
target.put(entry.getKey(), updatePlaceholders(document, new Document(), mappingContext,
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty))));
} else {
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,

View File

@@ -69,7 +69,7 @@ public class DocumentReferenceSource {
*/
@Nullable
static Object getTargetSource(Object source) {
return source instanceof DocumentReferenceSource referenceSource ? referenceSource.getTargetSource() : source;
return source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() : source;
}
/**
@@ -79,6 +79,6 @@ public class DocumentReferenceSource {
* @return
*/
static Object getSelf(Object self) {
return self instanceof DocumentReferenceSource referenceSource ? referenceSource.getSelf() : self;
return self instanceof DocumentReferenceSource ? ((DocumentReferenceSource) self).getSelf() : self;
}
}

Some files were not shown because too many files have changed in this diff Show More