Compare commits
2 Commits
4.1.0-RC1...issue/2496

| Author | SHA1 | Date |
|---|---|---|
|  | 3cdcfa7d50 |  |
|  | 515ca43704 |  |
.mvn/wrapper/maven-wrapper.properties: 4 changes (vendored)
@@ -1,2 +1,2 @@
#Thu Apr 06 16:16:28 CEST 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.1/apache-maven-3.9.1-bin.zip
#Mon Feb 20 11:58:01 CET 2023
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.0/apache-maven-3.9.0-bin.zip
Jenkinsfile: 47 changes (vendored)
@@ -77,29 +77,10 @@ pipeline {
}
}
}
stage('Publish JDK (Java 20) + MongoDB 6.0') {
when {
anyOf {
changeset "ci/openjdk20-mongodb-6.0/**"
changeset "ci/pipeline.properties"
}
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk20-mongodb-6.0/")
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
image.push()
}
}
}
}
}
}

stage("test: baseline (main)") {
stage("test: baseline (Java 17)") {
when {
beforeAgent(true)
anyOf {
@@ -138,7 +119,7 @@ pipeline {
}
parallel {

stage("test: MongoDB 5.0 (main)") {
stage("test: MongoDB 5.0 (Java 17)") {
agent {
label 'data'
}
@@ -160,7 +141,7 @@ pipeline {
}
}

stage("test: MongoDB 6.0 (main)") {
stage("test: MongoDB 6.0 (Java 17)") {
agent {
label 'data'
}
@@ -181,28 +162,6 @@ pipeline {
}
}
}

stage("test: MongoDB 6.0 (next)") {
agent {
label 'data'
}
options { timeout(time: 30, unit: 'MINUTES') }
environment {
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
}
steps {
script {
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}
}
}
@@ -1,24 +0,0 @@
ARG BASE
FROM ${BASE}
# Any ARG statements before FROM are cleared.
ARG MONGODB

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
sed -i -e 's/http/https/g' /etc/apt/sources.list && \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
# MongoDB 6.0 release signing key
wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
# Needed when MongoDB creates a 6.0 folder.
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
echo ${TZ} > /etc/timezone

RUN apt-get update && \
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
@@ -1,10 +1,8 @@
# Java versions
java.main.tag=17.0.6_10-jdk-focal
java.next.tag=20-jdk-jammy

# Docker container images - standard
docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}

# Supported versions of MongoDB
docker.mongodb.4.4.version=4.4.18
pom.xml: 12 changes
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-RC1</version>
<version>4.1.x-GH-2496-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>3.1.0-RC1</version>
<version>3.1.0-SNAPSHOT</version>
</parent>

<modules>
@@ -26,8 +26,8 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>3.1.0-RC1</springdata.commons>
<mongo>4.9.1</mongo>
<springdata.commons>3.1.0-SNAPSHOT</springdata.commons>
<mongo>4.9.0</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>
@@ -145,8 +145,8 @@

<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-RC1</version>
<version>4.1.x-GH-2496-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-RC1</version>
<version>4.1.x-GH-2496-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -13,7 +13,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>4.1.0-RC1</version>
<version>4.1.x-GH-2496-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@@ -16,13 +16,12 @@
package org.springframework.data.mongodb;

import org.springframework.dao.UncategorizedDataAccessException;
import org.springframework.lang.Nullable;

public class UncategorizedMongoDbException extends UncategorizedDataAccessException {

private static final long serialVersionUID = -2336595514062364929L;

public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) {
public UncategorizedMongoDbException(String msg, Throwable cause) {
super(msg, cause);
}
}
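Reviewer note: the RC1 side marks the constructor's cause as @Nullable, documenting that callers may pass null when a failure is detected from a command reply rather than from a thrown exception. A minimal sketch of such a caller, modeled on the alterIndex error handling that appears further down in this diff (the helper name and arguments are hypothetical):

```java
import org.springframework.data.mongodb.UncategorizedMongoDbException;

class IndexModificationFailure {

	// Hypothetical helper: the failure is derived from a command reply, so there is
	// no underlying Throwable and the cause is deliberately null.
	static void failIndexModification(String name, String responseJson) {
		throw new UncategorizedMongoDbException(
				"Index '%s' could not be modified. Response was %s".formatted(name, responseJson), null);
	}
}
```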
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.aot;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -26,6 +25,7 @@ import java.util.Set;
import org.springframework.aot.generate.GenerationContext;
import org.springframework.aot.hint.MemberCategory;
import org.springframework.aot.hint.TypeReference;
import org.springframework.core.ResolvableType;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.MergedAnnotations;
import org.springframework.data.annotation.Reference;
@@ -33,6 +33,7 @@ import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
import org.springframework.data.util.TypeUtils;

/**
* @author Christoph Strobl
@@ -65,7 +66,9 @@ public class LazyLoadingProxyAotProcessor {
if (field.getType().isInterface()) {

List<Class<?>> interfaces = new ArrayList<>(
Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
TypeUtils.resolveTypesInSignature(ResolvableType.forField(field, type)));

interfaces.add(0, org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class);
interfaces.add(org.springframework.aop.SpringProxy.class);
interfaces.add(org.springframework.aop.framework.Advised.class);
interfaces.add(org.springframework.core.DecoratingProxy.class);
@@ -74,7 +77,7 @@ public class LazyLoadingProxyAotProcessor {
} else {

Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
LazyLoadingInterceptor::none);
() -> LazyLoadingInterceptor.none());

// see: spring-projects/spring-framework/issues/29309
generationContext.getRuntimeHints().reflection().registerType(proxyClass,
@@ -19,7 +19,6 @@ import java.util.List;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.util.Pair;

import com.mongodb.bulk.BulkWriteResult;
@@ -29,15 +28,6 @@ import com.mongodb.bulk.BulkWriteResult;
* make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
* operations or list of similar operations in sequence which can then eventually be executed by calling
* {@link #execute()}.
*
* <pre class="code">
* MongoOperations ops = …;
*
* ops.bulkOps(BulkMode.UNORDERED, Person.class)
* .insert(newPerson)
* .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
* .execute();
* </pre>
* <p>
* Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
* that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
@@ -85,19 +75,7 @@ public interface BulkOperations {
* @param update {@link Update} operation to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations updateOne(Query query, Update update) {
return updateOne(query, (UpdateDefinition) update);
}

/**
* Add a single update to the bulk operation. For the update request, only the first matching document is updated.
*
* @param query update criteria, must not be {@literal null}.
* @param update {@link Update} operation to perform, must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations updateOne(Query query, UpdateDefinition update);
BulkOperations updateOne(Query query, Update update);

/**
* Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
@@ -105,7 +83,7 @@ public interface BulkOperations {
* @param updates Update operations to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates);
BulkOperations updateOne(List<Pair<Query, Update>> updates);

/**
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
@@ -114,19 +92,7 @@ public interface BulkOperations {
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations updateMulti(Query query, Update update) {
return updateMulti(query, (UpdateDefinition) update);
}

/**
* Add a single update to the bulk operation. For the update request, all matching documents are updated.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations updateMulti(Query query, UpdateDefinition update);
BulkOperations updateMulti(Query query, Update update);

/**
* Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
@@ -134,7 +100,7 @@ public interface BulkOperations {
* @param updates Update operations to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates);
BulkOperations updateMulti(List<Pair<Query, Update>> updates);

/**
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -144,20 +110,7 @@ public interface BulkOperations {
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
*/
default BulkOperations upsert(Query query, Update update) {
return upsert(query, (UpdateDefinition) update);
}

/**
* Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
* else an insert.
*
* @param query Update criteria.
* @param update Update operation to perform.
* @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
* @since 4.1
*/
BulkOperations upsert(Query query, UpdateDefinition update);
BulkOperations upsert(Query query, Update update);

/**
* Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -189,7 +142,7 @@ public interface BulkOperations {
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
default BulkOperations replaceOne(Query query, Object replacement) {
@@ -202,7 +155,7 @@ public interface BulkOperations {
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
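Reviewer note: the net effect of this file's hunks is that the RC1 side accepts UpdateDefinition for updateOne, updateMulti and upsert and keeps the Update signatures as default methods, so aggregation pipeline updates (AggregationUpdate implements UpdateDefinition) can participate in bulk writes. A minimal sketch of what the widened API permits, assuming a hypothetical Person document type:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

class BulkUpdateSketch {

	void bulkUpdates(MongoOperations ops) {

		ops.bulkOps(BulkMode.UNORDERED, Person.class)
				// document-style update; routed through the default Update overload
				.updateOne(new Query(where("firstname").is("Joe")), Update.update("lastname", "Doe"))
				// aggregation pipeline update; accepted because AggregationUpdate is an UpdateDefinition
				.updateMulti(new Query(), AggregationUpdate.update().set("migrated").toValue(true))
				.execute();
	}

	// Hypothetical document type used for illustration only.
	record Person(String firstname, String lastname) {}
}
```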
@@ -1,221 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEvent;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.util.Assert;

import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;

/**
* Support class for bulk operations.
*
* @author Mark Paluch
* @since 4.1
*/
abstract class BulkOperationsSupport {

private final String collectionName;

BulkOperationsSupport(String collectionName) {

Assert.hasText(collectionName, "CollectionName must not be null nor empty");

this.collectionName = collectionName;
}

/**
* Emit a {@link BeforeSaveEvent}.
*
* @param holder
*/
void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.model() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
} else if (holder.model() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
}
}

/**
* Emit a {@link AfterSaveEvent}.
*
* @param holder
*/
void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.model() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
} else if (holder.model() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
}
}

WriteModel<Document> mapWriteModel(Object source, WriteModel<Document> writeModel) {

if (writeModel instanceof UpdateOneModel<Document> model) {

if (source instanceof AggregationUpdate aggregationUpdate) {

List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
}

return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof UpdateManyModel<Document> model) {

if (source instanceof AggregationUpdate aggregationUpdate) {

List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
}

return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof DeleteOneModel<Document> model) {
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

if (writeModel instanceof DeleteManyModel<Document> model) {
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

return writeModel;
}

private List<Document> mapUpdatePipeline(AggregationUpdate source) {

Class<?> type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class;
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type,
updateMapper().getMappingContext(), queryMapper());

return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context);
}

/**
* Emit a {@link ApplicationEvent} if event multicasting is enabled.
*
* @param event
*/
protected abstract void maybeEmitEvent(ApplicationEvent event);

/**
* @return the {@link UpdateMapper} to use.
*/
protected abstract UpdateMapper updateMapper();

/**
* @return the {@link QueryMapper} to use.
*/
protected abstract QueryMapper queryMapper();

/**
* @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}.
*/
protected abstract Optional<? extends MongoPersistentEntity<?>> entity();

protected Bson getMappedUpdate(Bson update) {
return updateMapper().getMappedObject(update, entity());
}

protected Bson getMappedQuery(Bson query) {
return queryMapper().getMappedObject(query, entity());
}

protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

BulkWriteOptions options = new BulkWriteOptions();

return switch (bulkMode) {
case ORDERED -> options.ordered(true);
case UNORDERED -> options.ordered(false);
};
}

/**
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
* @param update The {@link Update} to apply
* @param upsert flag to indicate if document should be upserted.
* @return new instance of {@link UpdateOptions}.
*/
protected static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {

UpdateOptions options = new UpdateOptions();
options.upsert(upsert);

if (update.hasArrayFilters()) {
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
list.add(arrayFilter.asDocument());
}
options.arrayFilters(list);
}

filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
return options;
}

/**
* Value object chaining together an actual source with its {@link WriteModel} representation.
*
* @author Christoph Strobl
*/
record SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
}
}
@@ -16,47 +16,42 @@
package org.springframework.data.mongodb.core;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.context.ApplicationEvent;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.util.Pair;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.model.*;

/**
* Default implementation for {@link BulkOperations}.
@@ -72,7 +67,7 @@ import com.mongodb.client.model.WriteModel;
* @author Jacob Botuck
* @since 1.9
*/
class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations {
class DefaultBulkOperations implements BulkOperations {

private final MongoOperations mongoOperations;
private final String collectionName;
@@ -80,6 +75,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();

private @Nullable WriteConcern defaultWriteConcern;

private BulkWriteOptions bulkOptions;

/**
@@ -94,7 +90,6 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
DefaultBulkOperations(MongoOperations mongoOperations, String collectionName,
BulkOperationContext bulkOperationContext) {

super(collectionName);
Assert.notNull(mongoOperations, "MongoOperations must not be null");
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
@@ -102,7 +97,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
this.mongoOperations = mongoOperations;
this.collectionName = collectionName;
this.bulkOperationContext = bulkOperationContext;
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
}

/**
@@ -137,20 +132,21 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}

@Override
public BulkOperations updateOne(Query query, UpdateDefinition update) {
@SuppressWarnings("unchecked")
public BulkOperations updateOne(Query query, Update update) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

return update(query, update, false, false);
return updateOne(Collections.singletonList(Pair.of(query, update)));
}

@Override
public BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates) {
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {

Assert.notNull(updates, "Updates must not be null");

for (Pair<Query, UpdateDefinition> update : updates) {
for (Pair<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, false);
}

@@ -158,22 +154,21 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}

@Override
public BulkOperations updateMulti(Query query, UpdateDefinition update) {
@SuppressWarnings("unchecked")
public BulkOperations updateMulti(Query query, Update update) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

update(query, update, false, true);

return this;
return updateMulti(Collections.singletonList(Pair.of(query, update)));
}

@Override
public BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates) {
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {

Assert.notNull(updates, "Updates must not be null");

for (Pair<Query, UpdateDefinition> update : updates) {
for (Pair<Query, Update> update : updates) {
update(update.getFirst(), update.getSecond(), false, true);
}

@@ -181,7 +176,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
}

@Override
public BulkOperations upsert(Query query, UpdateDefinition update) {
public BulkOperations upsert(Query query, Update update) {
return update(query, update, true, true);
}

@@ -253,7 +248,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati

return result;
} finally {
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
}
}

@@ -272,8 +267,9 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
bulkOptions);
} catch (RuntimeException ex) {

if (ex instanceof MongoBulkWriteException mongoBulkWriteException) {
if (ex instanceof MongoBulkWriteException) {

MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
if (mongoBulkWriteException.getWriteConcernError() != null) {
throw new DataIntegrityViolationException(ex.getMessage(), ex);
}
@@ -288,17 +284,17 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati

maybeEmitBeforeSaveEvent(it);

if (it.model() instanceof InsertOneModel) {
if (it.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) it.model()).getDocument();
maybeInvokeBeforeSaveCallback(it.source(), target);
} else if (it.model() instanceof ReplaceOneModel) {
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
} else if (it.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) it.model()).getReplacement();
maybeInvokeBeforeSaveCallback(it.source(), target);
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
maybeInvokeBeforeSaveCallback(it.getSource(), target);
}

return mapWriteModel(it.source(), it.model());
return mapWriteModel(it.getModel());
}

/**
@@ -310,7 +306,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
* @param multi whether to issue a multi-update.
* @return the {@link BulkOperations} with the update registered.
*/
private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");
@@ -326,24 +322,47 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
return this;
}

@Override
protected void maybeEmitEvent(ApplicationEvent event) {
bulkOperationContext.publishEvent(event);
private WriteModel<Document> mapWriteModel(WriteModel<Document> writeModel) {

if (writeModel instanceof UpdateOneModel) {

UpdateOneModel<Document> model = (UpdateOneModel<Document>) writeModel;

return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof UpdateManyModel) {

UpdateManyModel<Document> model = (UpdateManyModel<Document>) writeModel;

return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
model.getOptions());
}

if (writeModel instanceof DeleteOneModel) {

DeleteOneModel<Document> model = (DeleteOneModel<Document>) writeModel;

return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

if (writeModel instanceof DeleteManyModel) {

DeleteManyModel<Document> model = (DeleteManyModel<Document>) writeModel;

return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
}

return writeModel;
}

@Override
protected UpdateMapper updateMapper() {
return bulkOperationContext.updateMapper();
private Bson getMappedUpdate(Bson update) {
return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity());
}

@Override
protected QueryMapper queryMapper() {
return bulkOperationContext.queryMapper();
}

@Override
protected Optional<? extends MongoPersistentEntity<?>> entity() {
return bulkOperationContext.entity();
private Bson getMappedQuery(Bson query) {
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
}

private Document getMappedObject(Object source) {
@@ -362,83 +381,268 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
models.add(new SourceAwareWriteModelHolder(source, model));
}

private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.model() instanceof InsertOneModel) {
if (holder.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
maybeInvokeAfterSaveCallback(holder.source(), target);
} else if (holder.model() instanceof ReplaceOneModel) {
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
maybeInvokeAfterSaveCallback(holder.source(), target);
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
}
}

private void publishEvent(MongoMappingEvent<?> event) {
bulkOperationContext.publishEvent(event);
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

if (holder.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
} else if (holder.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
}
}

private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {

if (holder.getModel() instanceof InsertOneModel) {

Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
} else if (holder.getModel() instanceof ReplaceOneModel) {

Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
maybeInvokeAfterSaveCallback(holder.getSource(), target);
}
}

private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

if (bulkOperationContext.getEventPublisher() == null) {
return event;
}

bulkOperationContext.getEventPublisher().publishEvent(event);
return event;
}

private Object maybeInvokeBeforeConvertCallback(Object value) {
return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName);

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
}

private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName);

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
collectionName);
}

private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName);

if (bulkOperationContext.getEntityCallbacks() == null) {
return value;
}

return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
collectionName);
}

private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

BulkWriteOptions options = new BulkWriteOptions();

switch (bulkMode) {
case ORDERED:
return options.ordered(true);
case UNORDERED:
return options.ordered(false);
}

throw new IllegalStateException("BulkMode was null");
}

/**
* {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
* @param update The {@link Update} to apply
* @param upsert flag to indicate if document should be upserted.
* @return new instance of {@link UpdateOptions}.
*/
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {

UpdateOptions options = new UpdateOptions();
options.upsert(upsert);

if (update.hasArrayFilters()) {
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
list.add(arrayFilter.asDocument());
}
options.arrayFilters(list);
}

filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
return options;
}

/**
* {@link BulkOperationContext} holds information about
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
* {@link QueryMapper} and {@link UpdateMapper}.
*
* @author Christoph Strobl
* @since 2.0
*/
record BulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
@Nullable EntityCallbacks entityCallbacks) {
static final class BulkOperationContext {

public boolean skipEntityCallbacks() {
return entityCallbacks == null;
private final BulkMode bulkMode;
private final Optional<? extends MongoPersistentEntity<?>> entity;
private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;
private final ApplicationEventPublisher eventPublisher;
private final EntityCallbacks entityCallbacks;

BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
EntityCallbacks entityCallbacks) {

this.bulkMode = bulkMode;
this.entity = entity;
this.queryMapper = queryMapper;
this.updateMapper = updateMapper;
this.eventPublisher = eventPublisher;
this.entityCallbacks = entityCallbacks;
}

public boolean skipEventPublishing() {
return eventPublisher == null;
public BulkMode getBulkMode() {
return this.bulkMode;
}

@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
return this.entity;
}

if (skipEntityCallbacks()) {
return entity;
public QueryMapper getQueryMapper() {
return this.queryMapper;
}

public UpdateMapper getUpdateMapper() {
return this.updateMapper;
}

public ApplicationEventPublisher getEventPublisher() {
return this.eventPublisher;
}

public EntityCallbacks getEntityCallbacks() {
return this.entityCallbacks;
}

@Override
public boolean equals(@Nullable Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;

BulkOperationContext that = (BulkOperationContext) o;

if (bulkMode != that.bulkMode)
return false;
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
return false;
}

return entityCallbacks.callback(callbackType, entity, collectionName);
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
return false;
}
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
}

@SuppressWarnings("rawtypes")
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
String collectionName) {

if (skipEntityCallbacks()) {
return entity;
}

return entityCallbacks.callback(callbackType, entity, document, collectionName);
@Override
public int hashCode() {
int result = bulkMode != null ? bulkMode.hashCode() : 0;
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
return result;
}

public void publishEvent(ApplicationEvent event) {

if (skipEventPublishing()) {
return;
}

eventPublisher.publishEvent(event);
public String toString() {
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
}
}

/**
* Value object chaining together an actual source with its {@link WriteModel} representation.
*
* @since 2.2
* @author Christoph Strobl
*/
private static final class SourceAwareWriteModelHolder {

private final Object source;
private final WriteModel<Document> model;

SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {

this.source = source;
this.model = model;
}

public Object getSource() {
return this.source;
}

public WriteModel<Document> getModel() {
return this.model;
}

@Override
public boolean equals(@Nullable Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;

SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;

if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
return false;
}
return ObjectUtils.nullSafeEquals(this.model, that.model);
}

@Override
public int hashCode() {
int result = ObjectUtils.nullSafeHashCode(model);
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
return result;
}

public String toString() {
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
+ this.getModel() + ")";
}
}
}
@@ -22,7 +22,6 @@ import java.util.List;
import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;

import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
@@ -157,20 +155,6 @@ public class DefaultIndexOperations implements IndexOperations {

}

@Override
public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {

Document indexOptions = new Document("name", name);
indexOptions.putAll(options.toDocument());

Document result = mongoOperations
.execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)));

if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
}
}

public void dropAllIndexes() {
dropIndex("*");
}
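Reviewer note: the alterIndex method, present only on the RC1 side of this hunk, drives the server's collMod command and treats anything other than ok: 1 in the reply as a failure. A hedged sketch of issuing the equivalent raw command; the collection "people" and index "age_1" are hypothetical:

```java
import org.bson.Document;
import org.springframework.data.mongodb.core.MongoTemplate;

class AlterIndexSketch {

	// Sends the same collMod command that alterIndex builds; the server reports
	// success with ok: 1 in the returned document.
	Document hideIndex(MongoTemplate template) {
		return template.executeCommand(new Document("collMod", "people")
				.append("index", new Document("name", "age_1").append("hidden", true)));
	}
}
```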
@@ -1,390 +0,0 @@
/*
* Copyright 2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.data.mapping.callback.EntityCallback;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.reactivestreams.client.MongoCollection;

/**
* Default implementation for {@link ReactiveBulkOperations}.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 4.1
*/
class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations {

private final ReactiveMongoOperations mongoOperations;
private final String collectionName;
private final ReactiveBulkOperationContext bulkOperationContext;
private final List<Mono<SourceAwareWriteModelHolder>> models = new ArrayList<>();

private @Nullable WriteConcern defaultWriteConcern;

private BulkWriteOptions bulkOptions;

/**
* Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and
* {@link ReactiveBulkOperationContext}.
*
* @param mongoOperations must not be {@literal null}.
* @param collectionName must not be {@literal null}.
* @param bulkOperationContext must not be {@literal null}.
*/
DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName,
ReactiveBulkOperationContext bulkOperationContext) {

super(collectionName);

Assert.notNull(mongoOperations, "MongoOperations must not be null");
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");

this.mongoOperations = mongoOperations;
this.collectionName = collectionName;
this.bulkOperationContext = bulkOperationContext;
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
}

/**
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
*
* @param defaultWriteConcern can be {@literal null}.
*/
void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) {
this.defaultWriteConcern = defaultWriteConcern;
}

@Override
public ReactiveBulkOperations insert(Object document) {

Assert.notNull(document, "Document must not be null");

this.models.add(Mono.just(document).flatMap(it -> {
maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
return maybeInvokeBeforeConvertCallback(it);
}).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it)))));

return this;
}

@Override
public ReactiveBulkOperations insert(List<? extends Object> documents) {

Assert.notNull(documents, "Documents must not be null");

documents.forEach(this::insert);

return this;
}

@Override
public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

update(query, update, false, false);
return this;
}

@Override
public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(update, "Update must not be null");

update(query, update, false, true);
return this;
}

@Override
public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) {
return update(query, update, true, true);
}

@Override
public ReactiveBulkOperations remove(Query query) {

Assert.notNull(query, "Query must not be null");

DeleteOptions deleteOptions = new DeleteOptions();
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);

this.models.add(Mono.just(query)
.map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions))));

return this;
}

@Override
public ReactiveBulkOperations remove(List<Query> removes) {

Assert.notNull(removes, "Removals must not be null");

for (Query query : removes) {
remove(query);
}

return this;
}

@Override
public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {

Assert.notNull(query, "Query must not be null");
Assert.notNull(replacement, "Replacement must not be null");
Assert.notNull(options, "Options must not be null");

ReplaceOptions replaceOptions = new ReplaceOptions();
replaceOptions.upsert(options.isUpsert());
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);

this.models.add(Mono.just(replacement).flatMap(it -> {
maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
return maybeInvokeBeforeConvertCallback(it);
}).map(it -> new SourceAwareWriteModelHolder(it,
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(it), replaceOptions))));

return this;
}

@Override
public Mono<BulkWriteResult> execute() {

try {
return mongoOperations.execute(collectionName, this::bulkWriteTo).next();
} finally {
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
}
}
private Mono<BulkWriteResult> bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
Flux<SourceAwareWriteModelHolder> concat = Flux.concat(models).flatMap(it -> {
|
||||
|
||||
if (it.model()instanceof InsertOneModel<Document> iom) {
|
||||
|
||||
Document target = iom.getDocument();
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
return maybeInvokeBeforeSaveCallback(it.source(), target)
|
||||
.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom)));
|
||||
} else if (it.model()instanceof ReplaceOneModel<Document> rom) {
|
||||
|
||||
Document target = rom.getReplacement();
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
return maybeInvokeBeforeSaveCallback(it.source(), target)
|
||||
.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom)));
|
||||
}
|
||||
|
||||
return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model())));
|
||||
});
|
||||
|
||||
MongoCollection<Document> theCollection = collection;
|
||||
return concat.collectList().flatMap(it -> {
|
||||
|
||||
return Mono
|
||||
.from(theCollection
|
||||
.bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions))
|
||||
.doOnSuccess(state -> {
|
||||
it.forEach(this::maybeEmitAfterSaveEvent);
|
||||
}).flatMap(state -> {
|
||||
List<Mono<Object>> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList());
|
||||
|
||||
return Flux.concat(monos).then(Mono.just(state));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
* @param query the {@link Query} to determine documents to update.
|
||||
* @param update the {@link Update} to perform, must not be {@literal null}.
|
||||
* @param upsert whether to upsert.
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||
|
||||
this.models.add(Mono.just(update).map(it -> {
|
||||
if (multi) {
|
||||
return new SourceAwareWriteModelHolder(update,
|
||||
new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options));
|
||||
}
|
||||
return new SourceAwareWriteModelHolder(update,
|
||||
new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options));
|
||||
}));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void maybeEmitEvent(ApplicationEvent event) {
|
||||
bulkOperationContext.publishEvent(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UpdateMapper updateMapper() {
|
||||
return bulkOperationContext.updateMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected QueryMapper queryMapper() {
|
||||
return bulkOperationContext.queryMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Optional<? extends MongoPersistentEntity<?>> entity() {
|
||||
return bulkOperationContext.entity();
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.model() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
|
||||
return maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
} else if (holder.model() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
|
||||
return maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
}
|
||||
return Mono.just(holder.source());
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeBeforeConvertCallback(Object value) {
|
||||
return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as
|
||||
* references to {@link QueryMapper} and {@link UpdateMapper}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
record ReactiveBulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
|
||||
@Nullable ReactiveEntityCallbacks entityCallbacks) {
|
||||
|
||||
public boolean skipEntityCallbacks() {
|
||||
return entityCallbacks == null;
|
||||
}
|
||||
|
||||
public boolean skipEventPublishing() {
|
||||
return eventPublisher == null;
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
|
||||
|
||||
if (skipEntityCallbacks()) {
|
||||
return Mono.just(entity);
|
||||
}
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, collectionName);
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
|
||||
String collectionName) {
|
||||
|
||||
if (skipEntityCallbacks()) {
|
||||
return Mono.just(entity);
|
||||
}
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, document, collectionName);
|
||||
}
|
||||
|
||||
public void publishEvent(ApplicationEvent event) {
|
||||
|
||||
if (skipEventPublishing()) {
|
||||
return;
|
||||
}
|
||||
|
||||
eventPublisher.publishEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
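Every mutating call above only stages a deferred Mono<SourceAwareWriteModelHolder> in the models list; conversion, the BeforeConvert/BeforeSave events and the entity callbacks run when execute() drains that list. A minimal usage sketch (hedged: "template" and "Person" are illustrative names for a configured ReactiveMongoTemplate and a mapped domain class):

Mono<BulkWriteResult> result = template.bulkOps(BulkMode.ORDERED, Person.class)
        .insert(new Person("Walter"))
        .remove(query(where("lastname").is("White")))
        .execute(); // staged models are only materialized and written here

query(...) and where(...) are the usual static imports from Query and Criteria.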
@@ -22,7 +22,6 @@ import java.util.Collection;
import java.util.Optional;

import org.bson.Document;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.NumberUtils;

import com.mongodb.client.model.IndexOptions;

@@ -106,22 +104,6 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
        }).next();
    }

    @Override
    public Mono<Void> alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {

        return mongoOperations.execute(db -> {

            Document indexOptions = new Document("name", name);
            indexOptions.putAll(options.toDocument());

            return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)))
                    .doOnNext(result -> {
                        if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
                            throw new UncategorizedMongoDbException(
                                    "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
                        }
                    });
        }).then();
    }

    @Nullable
    private MongoPersistentEntity<?> lookupPersistentEntity(String collection) {

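alterIndex(name, options) above translates the given IndexOptions into a collMod command. A hedged usage sketch ("template" and the index name are illustrative):

template.indexOps("person")
        .alterIndex("lastname-idx", org.springframework.data.mongodb.core.index.IndexOptions.hidden())
        .subscribe();

which corresponds to running db.runCommand({ collMod: "person", index: { name: "lastname-idx", hidden: true } }) on the server.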
@@ -119,10 +119,6 @@ abstract class IndexConverters {
            ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
        }

        if (indexOptions.containsKey("hidden")) {
            ops = ops.hidden((Boolean) indexOptions.get("hidden"));
        }

        return ops;
    };
}

@@ -74,16 +74,7 @@ import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.JsonSchemaMapper;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper;
import org.springframework.data.mongodb.core.convert.MongoWriter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.convert.*;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher;
@@ -119,16 +110,7 @@ import com.mongodb.ClientSessionOptions;
import com.mongodb.MongoException;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.client.AggregateIterable;
import com.mongodb.client.ClientSession;
import com.mongodb.client.DistinctIterable;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MapReduceIterable;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.MongoIterable;
import com.mongodb.client.*;
import com.mongodb.client.model.*;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
@@ -768,8 +750,8 @@ public class MongoTemplate
    }

    @Override
    public BulkOperations bulkOps(BulkMode mode, String collectionName) {
        return bulkOps(mode, null, collectionName);
    public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) {
        return bulkOps(bulkMode, null, collectionName);
    }

    @Override
@@ -889,7 +871,7 @@ public class MongoTemplate
        Assert.notNull(targetClass, "Target type must not be null");

        EntityProjection<T, ?> projection = operations.introspectProjection(targetClass, sourceClass);
        ProjectingReadCallback<?, T> callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName);
        ProjectingReadCallback<?,T> callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName);
        int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE;

        if (query.hasKeyset()) {

@@ -1,138 +0,0 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import reactor.core.publisher.Mono;

import java.util.List;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;

import com.mongodb.bulk.BulkWriteResult;

/**
 * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6
 * and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple
 * single operations or lists of similar operations in sequence which can then eventually be executed by calling
 * {@link #execute()}.
 *
 * <pre class="code">
 * ReactiveMongoOperations ops = …;
 *
 * ops.bulkOps(BulkMode.UNORDERED, Person.class)
 *    .insert(newPerson)
 *    .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
 *    .execute();
 * </pre>
 * <p>
 * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
 * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
 * the version field remains not populated.
 *
 * @author Christoph Strobl
 * @since 4.1
 */
public interface ReactiveBulkOperations {

    /**
     * Add a single insert to the bulk operation.
     *
     * @param document the document to insert, must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
     */
    ReactiveBulkOperations insert(Object document);

    /**
     * Add a list of inserts to the bulk operation.
     *
     * @param documents List of documents to insert, must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
     */
    ReactiveBulkOperations insert(List<? extends Object> documents);

    /**
     * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
     *
     * @param query update criteria, must not be {@literal null}.
     * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
     */
    ReactiveBulkOperations updateOne(Query query, UpdateDefinition update);

    /**
     * Add a single update to the bulk operation. For the update request, all matching documents are updated.
     *
     * @param query Update criteria.
     * @param update Update operation to perform.
     * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
     */
    ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update);

    /**
     * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
     * else an insert.
     *
     * @param query Update criteria.
     * @param update Update operation to perform.
     * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
     */
    ReactiveBulkOperations upsert(Query query, UpdateDefinition update);

    /**
     * Add a single remove operation to the bulk operation.
     *
     * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
     */
    ReactiveBulkOperations remove(Query remove);

    /**
     * Add a list of remove operations to the bulk operation.
     *
     * @param removes the remove operations to perform, must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
     */
    ReactiveBulkOperations remove(List<Query> removes);

    /**
     * Add a single replace operation to the bulk operation.
     *
     * @param query Update criteria.
     * @param replacement the replacement document. Must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
     */
    default ReactiveBulkOperations replaceOne(Query query, Object replacement) {
        return replaceOne(query, replacement, FindAndReplaceOptions.empty());
    }

    /**
     * Add a single replace operation to the bulk operation.
     *
     * @param query Update criteria.
     * @param replacement the replacement document. Must not be {@literal null}.
     * @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
     * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
     */
    ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);

    /**
     * Execute all bulk operations using the default write concern.
     *
     * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc.
     */
    Mono<BulkWriteResult> execute();
}
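A hedged sketch of the contract above in use ("template" is assumed to be a configured ReactiveMongoTemplate): the chosen BulkMode decides whether MongoDB stops at the first failed operation (ORDERED) or attempts all of them (UNORDERED), and nothing is executed until the Mono returned by execute() is subscribed to.

template.bulkOps(BulkMode.UNORDERED, Person.class)
        .updateMulti(query(where("active").is(false)), Update.update("status", "archived"))
        .execute()
        .map(BulkWriteResult::getModifiedCount)
        .subscribe(modified -> System.out.println("archived " + modified));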
@@ -29,7 +29,6 @@ import org.springframework.data.domain.KeysetScrollPosition;
import org.springframework.data.domain.Window;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
@@ -350,40 +349,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
     */
    Mono<Void> dropCollection(String collectionName);

    /**
     * Returns a new {@link ReactiveBulkOperations} for the given collection. <br />
     * <strong>NOTE:</strong> Any additional support for field mapping, etc. is not available for {@literal update} or
     * {@literal remove} operations in bulk mode due to the lack of domain type information. Use
     * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support.
     *
     * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
     * @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
     * @return {@link ReactiveBulkOperations} on the named collection.
     * @since 4.1
     */
    ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName);

    /**
     * Returns a new {@link ReactiveBulkOperations} for the given entity type.
     *
     * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
     * @param entityClass the entity class, must not be {@literal null}.
     * @return {@link ReactiveBulkOperations} on the collection associated with the given entity class.
     * @since 4.1
     */
    ReactiveBulkOperations bulkOps(BulkMode mode, Class<?> entityClass);

    /**
     * Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name.
     *
     * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}.
     * @param entityType the entity type. Can be {@literal null}.
     * @param collectionName the name of the collection to work on, must not be {@literal null} or empty.
     * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class.
     * @since 4.1
     */
    ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class<?> entityType, String collectionName);

    /**
     * Query for a {@link Flux} of objects of type T from the collection used by the entity class. <br />
     * The object is converted from the MongoDB native representation using an instance of {@link MongoConverter}. Unless
@@ -1782,7 +1747,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
    <T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, String inputCollectionName, Class<T> resultType,
            String mapFunction, String reduceFunction, MapReduceOptions options);

    /**
     * Returns the underlying {@link MongoConverter}.
     *

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;

import static org.springframework.data.mongodb.core.query.SerializationUtils.*;

import org.springframework.data.mongodb.core.convert.DefaultReactiveDbRefResolver;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
@@ -72,9 +73,7 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate;
import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext;
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
@@ -203,6 +202,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
    private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION;

    private CountExecution countExecution = this::doExactCount;
    private DefaultReactiveDbRefResolver dbRefResolver;

    /**
     * Constructor used for a basic template configuration.
@@ -738,6 +739,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
    public <T> Mono<Void> dropCollection(Class<T> entityClass) {
        return dropCollection(getCollectionName(entityClass));
    }

    @Override
    public Mono<Void> dropCollection(String collectionName) {

@@ -748,31 +750,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
        }).then();
    }

    @Override
    public ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName) {
        return bulkOps(mode, null, collectionName);
    }

    @Override
    public ReactiveBulkOperations bulkOps(BulkMode mode, Class<?> entityClass) {
        return bulkOps(mode, entityClass, getCollectionName(entityClass));
    }

    @Override
    public ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class<?> entityType, String collectionName) {

        Assert.notNull(mode, "BulkMode must not be null");
        Assert.hasText(collectionName, "Collection name must not be null or empty");

        DefaultReactiveBulkOperations operations = new DefaultReactiveBulkOperations(this, collectionName,
                new ReactiveBulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper,
                        updateMapper, eventPublisher, entityCallbacks));

        operations.setDefaultWriteConcern(writeConcern);

        return operations;
    }

    @Override
    public Flux<String> getCollectionNames() {
        return createFlux(MongoDatabase::listCollectionNames);
@@ -3059,14 +3036,15 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {

            maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));

            T entity = reader.read(type, document);

            if (entity == null) {
                throw new MappingException(String.format("EntityReader %s returned null", reader));
            }

            maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
            return maybeCallAfterConvert(entity, document, collectionName);
            return ReactiveValueResolver.prepareDbRefResolution(Mono.just(document), new DefaultReactiveDbRefResolver(getMongoDatabaseFactory()))
                    .map(it -> {
                        T entity = reader.read(type, it);
                        if (entity == null) {
                            throw new MappingException(String.format("EntityReader %s returned null", reader));
                        }
                        maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
                        return entity;
                    }).flatMap(it -> maybeCallAfterConvert(it, document, collectionName));
        }
    }

@@ -3099,15 +3077,20 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
            Class<T> returnType = projection.getMappedType().getType();
            maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName));

            Object entity = reader.project(projection, document);
            dbRefResolver = new DefaultReactiveDbRefResolver(getMongoDatabaseFactory());
            return ReactiveValueResolver.prepareDbRefResolution(Mono.just(document), dbRefResolver)
                    .map(it -> {
                        Object entity = reader.project(projection, document);

            if (entity == null) {
                throw new MappingException(String.format("EntityReader %s returned null", reader));
            }
                        if (entity == null) {
                            throw new MappingException(String.format("EntityReader %s returned null", reader));
                        }

            T castEntity = (T) entity;
            maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
            return maybeCallAfterConvert(castEntity, document, collectionName);
                        T castEntity = (T) entity;
                        maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
                        return castEntity;
                    })
                    .flatMap(it -> maybeCallAfterConvert(it, document, collectionName));
        }
    }

@@ -0,0 +1,95 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import java.util.List;
import java.util.Map.Entry;

import org.bson.Document;
import org.springframework.data.mongodb.core.convert.ReactiveDbRefResolver;

import com.mongodb.DBRef;

/**
 * @author Christoph Strobl
 * @since 4.1
 */
class ReactiveValueResolver {

    /**
     * Recursively replaces {@link DBRef} values (top level, nested documents and list elements) with their fetched
     * target documents before the entity is read.
     */
    static Mono<Document> prepareDbRefResolution(Mono<Document> root, ReactiveDbRefResolver dbRefResolver) {
        return root.flatMap(source -> {
            for (Entry<String, Object> entry : source.entrySet()) {
                Object value = entry.getValue();
                if (value instanceof DBRef dbRef) {
                    // fetch the target, resolve its own DBRefs, then rescan the enclosing document
                    return prepareDbRefResolution(dbRefResolver.initFetch(dbRef).defaultIfEmpty(new Document())
                            .flatMap(it -> prepareDbRefResolution(Mono.just(it), dbRefResolver)).map(resolved -> {
                                source.put(entry.getKey(), resolved.isEmpty() ? null : resolved);
                                return source;
                            }), dbRefResolver);
                }
                if (value instanceof Document nested) {
                    return prepareDbRefResolution(Mono.just(nested), dbRefResolver).map(it -> {
                        source.put(entry.getKey(), it);
                        return source;
                    });
                }
                if (value instanceof List<?> list) {
                    return Flux.fromIterable(list).concatMap(it -> {
                        if (it instanceof DBRef dbRef) {
                            return prepareDbRefResolution(dbRefResolver.initFetch(dbRef), dbRefResolver);
                        }
                        if (it instanceof Document document) {
                            return prepareDbRefResolution(Mono.just(document), dbRefResolver);
                        }
                        return Mono.just(it);
                    }).collectList().map(resolved -> {
                        source.put(entry.getKey(), resolved.isEmpty() ? null : resolved);
                        return source;
                    });
                }
            }
            return Mono.just(source);
        });
    }

    public Mono<Document> resolveValues(Mono<Document> document) {

        return document.flatMap(source -> {
            for (Entry<String, Object> entry : source.entrySet()) {
                Object val = entry.getValue();
                if (val instanceof Mono<?> valueMono) {
                    return valueMono.flatMap(value -> {
                        source.put(entry.getKey(), value);
                        return resolveValues(Mono.just(source));
                    });
                }
                if (entry.getValue() instanceof Document nested) {
                    return resolveValues(Mono.just(nested)).map(it -> {
                        source.put(entry.getKey(), it);
                        return source;
                    });
                }
                if (entry.getValue() instanceof List<?>) {
                    // do traverse list
                }
            }
            return Mono.just(source);
        });
    }
}
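In effect, prepareDbRefResolution(...) eagerly inlines every DBRef it finds before the converter reads the entity; a missing target becomes null via the defaultIfEmpty(new Document()) / isEmpty() guard. A hedged before/after sketch ("resolver" is assumed to be the DefaultReactiveDbRefResolver introduced below):

// source before: { "_id": 1, "friend": DBRef("persons", 42) }
Mono<Document> resolved = ReactiveValueResolver.prepareDbRefResolution(Mono.just(source), resolver);
// emitted after:  { "_id": 1, "friend": { "_id": 42, "lastname": "White" } }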
@@ -0,0 +1,81 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import reactor.core.publisher.Mono;

import java.util.List;

import org.bson.Document;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import com.mongodb.DBRef;
import com.mongodb.reactivestreams.client.MongoDatabase;

/**
 * @author Christoph Strobl
 * @since 4.1
 */
public class DefaultReactiveDbRefResolver implements ReactiveDbRefResolver {

    ReactiveMongoDatabaseFactory dbFactory;

    public DefaultReactiveDbRefResolver(ReactiveMongoDatabaseFactory dbFactory) {
        this.dbFactory = dbFactory;
    }

    @Nullable
    @Override
    public Mono<Object> resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref,
            DbRefResolverCallback callback, DbRefProxyHandler proxyHandler) {
        return null; // lazy DBRef resolution is not implemented for the reactive resolver (yet)
    }

    @Nullable
    @Override
    public Document fetch(DBRef dbRef) {
        throw new UnsupportedOperationException(); // blocking fetch is not supported by the reactive variant
    }

    @Override
    public List<Document> bulkFetch(List<DBRef> dbRefs) {
        throw new UnsupportedOperationException(); // blocking bulk fetch is not supported by the reactive variant
    }

    @Nullable
    @Override
    public Mono<Document> initFetch(DBRef dbRef) {

        // use the database named in the DBRef if present, the factory default otherwise
        Mono<MongoDatabase> mongoDatabase = StringUtils.hasText(dbRef.getDatabaseName())
                ? dbFactory.getMongoDatabase(dbRef.getDatabaseName())
                : dbFactory.getMongoDatabase();
        return mongoDatabase
                .flatMap(db -> Mono.from(db.getCollection(dbRef.getCollectionName()).find(new Document("_id", dbRef.getId()))));
    }

    @Nullable
    @Override
    public Mono<Object> resolveReference(MongoPersistentProperty property, Object source,
            ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
        if (source instanceof DBRef dbRef) {

        }
        throw new UnsupportedOperationException();
    }
}
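A hedged sketch of eager fetching through the resolver above ("factory" stands in for a configured ReactiveMongoDatabaseFactory; collection and id are illustrative):

DefaultReactiveDbRefResolver resolver = new DefaultReactiveDbRefResolver(factory);
resolver.initFetch(new DBRef("persons", 42))
        .defaultIfEmpty(new Document()) // mirrors the guard used by ReactiveValueResolver
        .subscribe(doc -> System.out.println(doc.toJson()));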
@@ -72,7 +72,7 @@ public final class LazyLoadingProxyFactory {

    /**
     * Predict the proxy target type. This will advise the infrastructure to resolve as many pieces as possible in a
     * potential AOT scenario without necessarily resolving the entire object.
     *
     * @param propertyType the type to proxy
     * @param interceptor the interceptor to be added.
     * @return the proxy type.
@@ -90,30 +90,16 @@ public final class LazyLoadingProxyFactory {
                .getProxyClass(LazyLoadingProxy.class.getClassLoader());
    }

    /**
     * Create the {@link ProxyFactory} for the given type, already adding required additional interfaces.
     *
     * @param targetType the type to proxy.
     * @return the prepared {@link ProxyFactory}.
     * @since 4.0.5
     */
    public static ProxyFactory prepareFactory(Class<?> targetType) {
    private ProxyFactory prepareProxyFactory(Class<?> propertyType, Supplier<LazyLoadingInterceptor> interceptor) {

        ProxyFactory proxyFactory = new ProxyFactory();

        for (Class<?> type : targetType.getInterfaces()) {
        for (Class<?> type : propertyType.getInterfaces()) {
            proxyFactory.addInterface(type);
        }

        proxyFactory.addInterface(LazyLoadingProxy.class);
        proxyFactory.addInterface(targetType);

        return proxyFactory;
    }

    private ProxyFactory prepareProxyFactory(Class<?> propertyType, Supplier<LazyLoadingInterceptor> interceptor) {

        ProxyFactory proxyFactory = prepareFactory(propertyType);
        proxyFactory.addInterface(propertyType);
        proxyFactory.addAdvice(interceptor.get());

        return proxyFactory;

@@ -15,6 +15,7 @@
 */
package org.springframework.data.mongodb.core.convert;

import javax.print.Doc;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.*;
@@ -2443,7 +2444,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
        }
    }

    private static class PropertyTranslatingPropertyAccessor<T> implements PersistentPropertyAccessor<T> {
    private static class PropertyTranslatingPropertyAccessor<T> implements PersistentPropertyPathAccessor<T> {

        private final PersistentPropertyAccessor<T> delegate;
        private final PersistentPropertyTranslator propertyTranslator;
@@ -2460,7 +2461,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
        }

        @Override
        public void setProperty(PersistentProperty<?> property, @Nullable Object value) {
        public void setProperty(PersistentProperty property, @Nullable Object value) {
            delegate.setProperty(translate(property), value);
        }

@@ -2474,6 +2475,23 @@ public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
            return delegate.getBean();
        }

        @Override
        public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value,
                AccessOptions.SetOptions options) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object getProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path,
                AccessOptions.GetOptions context) {
            throw new UnsupportedOperationException();
        }

        @Override
        public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value) {
            throw new UnsupportedOperationException();
        }

        private MongoPersistentProperty translate(PersistentProperty<?> property) {
            return propertyTranslator.translate((MongoPersistentProperty) property);
        }

@@ -0,0 +1,38 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.convert;

import com.mongodb.DBRef;
import org.bson.Document;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.lang.Nullable;
import reactor.core.publisher.Mono;

/**
 * @author Christoph Strobl
 * @since 4.1
 */
public interface ReactiveDbRefResolver extends DbRefResolver {

    @Nullable
    default Mono<Document> initFetch(DBRef dbRef) {
        return Mono.justOrEmpty(fetch(dbRef));
    }

    Mono<Object> resolveReference(MongoPersistentProperty property, Object source,
            ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader);

    Mono<Object> resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback,
            DbRefProxyHandler proxyHandler);
}
@@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit;

import org.bson.Document;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.IndexOptions.Unique;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
@@ -40,9 +39,10 @@ public class Index implements IndexDefinition {

    private final Map<String, Direction> fieldSpec = new LinkedHashMap<String, Direction>();
    private @Nullable String name;
    private boolean unique = false;
    private boolean sparse = false;
    private boolean background = false;
    private final IndexOptions options = IndexOptions.none();
    private long expire = -1;
    private Optional<IndexFilter> filter = Optional.empty();
    private Optional<Collation> collation = Optional.empty();

@@ -70,8 +70,7 @@ public class Index implements IndexDefinition {
     * "https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
     */
    public Index unique() {

        this.options.setUnique(Unique.YES);
        this.unique = true;
        return this;
    }

@@ -99,20 +98,6 @@ public class Index implements IndexDefinition {
        return this;
    }

    /**
     * Hidden indexes are not visible to the query planner and cannot be used to support a query.
     *
     * @return this.
     * @see <a href=
     *      "https://www.mongodb.com/docs/manual/core/index-hidden/">https://www.mongodb.com/docs/manual/core/index-hidden/</a>
     * @since 4.1
     */
    public Index hidden() {

        options.setHidden(true);
        return this;
    }

    /**
     * Specifies TTL in seconds.
     *
@@ -149,7 +134,7 @@ public class Index implements IndexDefinition {
    public Index expire(long value, TimeUnit unit) {

        Assert.notNull(unit, "TimeUnit for expiration must not be null");
        options.setExpire(Duration.ofSeconds(unit.toSeconds(value)));
        this.expire = unit.toSeconds(value);
        return this;
    }

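On the 4.1.0-RC1 side the hooks above combine into a fluent definition whose flags all funnel into the shared IndexOptions instance instead of dedicated boolean/long fields. A hedged sketch (field and index names are illustrative):

Index index = new Index().on("lastname", Direction.ASC)
        .named("lastname-idx")
        .unique()
        .hidden()
        .expire(1, TimeUnit.HOURS);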
@@ -201,13 +186,18 @@ public class Index implements IndexDefinition {
        if (StringUtils.hasText(name)) {
            document.put("name", name);
        }
        if (unique) {
            document.put("unique", true);
        }
        if (sparse) {
            document.put("sparse", true);
        }
        if (background) {
            document.put("background", true);
        }
        document.putAll(options.toDocument());
        if (expire >= 0) {
            document.put("expireAfterSeconds", expire);
        }

        filter.ifPresent(val -> document.put("partialFilterExpression", val.getFilterObject()));
        collation.ifPresent(val -> document.append("collation", val.toDocument()));

@@ -52,7 +52,6 @@ public class IndexInfo {
    private final boolean unique;
    private final boolean sparse;
    private final String language;
    private final boolean hidden;
    private @Nullable Duration expireAfter;
    private @Nullable String partialFilterExpression;
    private @Nullable Document collation;
@@ -65,17 +64,6 @@ public class IndexInfo {
        this.unique = unique;
        this.sparse = sparse;
        this.language = language;
        this.hidden = false;
    }

    public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean sparse, String language,
            boolean hidden) {

        this.indexFields = Collections.unmodifiableList(indexFields);
        this.name = name;
        this.unique = unique;
        this.sparse = sparse;
        this.language = language;
        this.hidden = hidden;
    }

    /**
@@ -129,15 +117,14 @@ public class IndexInfo {

        String name = sourceDocument.get("name").toString();

        boolean unique = sourceDocument.get("unique", false);
        boolean sparse = sourceDocument.get("sparse", false);
        boolean hidden = sourceDocument.getBoolean("hidden", false);
        String language = sourceDocument.containsKey("default_language") ? sourceDocument.getString("default_language")
        boolean unique = sourceDocument.containsKey("unique") ? (Boolean) sourceDocument.get("unique") : false;
        boolean sparse = sourceDocument.containsKey("sparse") ? (Boolean) sourceDocument.get("sparse") : false;
        String language = sourceDocument.containsKey("default_language") ? (String) sourceDocument.get("default_language")
                : "";

        String partialFilter = extractPartialFilterString(sourceDocument);

        IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language, hidden);
        IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language);
        info.partialFilterExpression = partialFilter;
        info.collation = sourceDocument.get("collation", Document.class);

@@ -272,17 +259,12 @@ public class IndexInfo {
        return getIndexFields().stream().anyMatch(IndexField::isWildcard);
    }

    public boolean isHidden() {
        return hidden;
    }

    @Override
    public String toString() {

        return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", sparse=" + sparse
                + ", language=" + language + ", partialFilterExpression=" + partialFilterExpression + ", collation=" + collation
                + ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + ", hidden=" + hidden + "]";
                + ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + "]";
    }

    @Override
@@ -297,7 +279,6 @@ public class IndexInfo {
        result += 31 * ObjectUtils.nullSafeHashCode(partialFilterExpression);
        result += 31 * ObjectUtils.nullSafeHashCode(collation);
        result += 31 * ObjectUtils.nullSafeHashCode(expireAfter);
        result += 31 * ObjectUtils.nullSafeHashCode(hidden);
        return result;
    }

@@ -345,9 +326,6 @@ public class IndexInfo {
        if (!ObjectUtils.nullSafeEquals(expireAfter, other.expireAfter)) {
            return false;
        }
        if (hidden != other.hidden) {
            return false;
        }
        return true;
    }

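A hedged sketch of how a raw listIndexes document maps onto IndexInfo on the RC1 side (the issue branch drops the hidden flag again):

Document raw = new Document("key", new Document("lastname", 1))
        .append("name", "lastname-idx")
        .append("hidden", true);

IndexInfo info = IndexInfo.indexInfoOf(raw); // info.isHidden() == true on 4.1.0-RC1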
@@ -35,14 +35,6 @@ public interface IndexOperations {
     */
    String ensureIndex(IndexDefinition indexDefinition);

    /**
     * Alters the index with the given {@literal name}.
     *
     * @param name name of the index to alter.
     * @param options index options to apply.
     * @since 4.1
     */
    void alterIndex(String name, IndexOptions options);

    /**
     * Drops an index from this collection.
     *

@@ -50,11 +50,6 @@ public interface IndexOperationsAdapter extends IndexOperations {
        reactiveIndexOperations.dropIndex(name).block();
    }

    @Override
    public void alterIndex(String name, IndexOptions options) {
        reactiveIndexOperations.alterIndex(name, options).block();
    }

    @Override
    public void dropAllIndexes() {
        reactiveIndexOperations.dropAllIndexes().block();

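The adapter shown above bridges the reactive API into the blocking IndexOperations contract; its existing blocking(...) factory can be used like this (a hedged sketch):

IndexOperations indexOperations = IndexOperationsAdapter.blocking(reactiveIndexOperations);
indexOperations.alterIndex("lastname-idx", IndexOptions.hidden()); // delegates to the reactive variant and blocks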
@@ -1,160 +0,0 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.time.Duration;

import org.bson.Document;
import org.springframework.lang.Nullable;

/**
 * Changeable properties of an index. Can be used for index creation and modification.
 *
 * @author Christoph Strobl
 * @since 4.1
 */
public class IndexOptions {

    @Nullable
    private Duration expire;

    @Nullable
    private Boolean hidden;

    @Nullable
    private Unique unique;

    public enum Unique {

        NO,

        /**
         * When unique is true the index rejects duplicate entries.
         */
        YES,

        /**
         * An existing index is not checked for pre-existing, duplicate index entries but inserting new duplicate
         * entries fails.
         */
        PREPARE
    }

    /**
     * @return new empty instance of {@link IndexOptions}.
     */
    public static IndexOptions none() {
        return new IndexOptions();
    }

    /**
     * @return new instance of {@link IndexOptions} having the {@link Unique#YES} flag set.
     */
    public static IndexOptions unique() {

        IndexOptions options = new IndexOptions();
        options.unique = Unique.YES;
        return options;
    }

    /**
     * @return new instance of {@link IndexOptions} having the hidden flag set.
     */
    public static IndexOptions hidden() {

        IndexOptions options = new IndexOptions();
        options.hidden = true;
        return options;
    }

    /**
     * @return new instance of {@link IndexOptions} with the given expiration.
     */
    public static IndexOptions expireAfter(Duration duration) {

        IndexOptions options = new IndexOptions();
        options.expire = duration;
        return options;
    }

    /**
     * @return the expiration time. A {@link Duration#isNegative() negative value} represents no expiration,
     *         {@literal null} if not set.
     */
    @Nullable
    public Duration getExpire() {
        return expire;
    }

    /**
     * @param expire must not be {@literal null}.
     */
    public void setExpire(Duration expire) {
        this.expire = expire;
    }

    /**
     * @return {@literal true} if hidden, {@literal null} if not set.
     */
    @Nullable
    public Boolean isHidden() {
        return hidden;
    }

    /**
     * @param hidden whether the index should be hidden from the query planner.
     */
    public void setHidden(boolean hidden) {
        this.hidden = hidden;
    }

    /**
     * @return the unique property value, {@literal null} if not set.
     */
    @Nullable
    public Unique getUnique() {
        return unique;
    }

    /**
     * @param unique must not be {@literal null}.
     */
    public void setUnique(Unique unique) {
        this.unique = unique;
    }

    /**
     * @return the store native representation.
     */
    public Document toDocument() {

        Document document = new Document();
        if (unique != null) {
            switch (unique) {
                case NO -> document.put("unique", false);
                case YES -> document.put("unique", true);
                case PREPARE -> document.put("prepareUnique", true);
            }
        }
        if (hidden != null) {
            document.put("hidden", hidden);
        }

        if (expire != null && !expire.isNegative()) {
            document.put("expireAfterSeconds", expire.getSeconds());
        }
        return document;
    }
}
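A hedged sketch of the document representation produced by the class above (assuming the corrected expireAfter(...) factory that records the duration):

IndexOptions options = IndexOptions.expireAfter(Duration.ofHours(1));
options.setHidden(true);
options.toDocument(); // => { "hidden": true, "expireAfterSeconds": 3600 }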
@@ -35,15 +35,6 @@ public interface ReactiveIndexOperations {
     */
    Mono<String> ensureIndex(IndexDefinition indexDefinition);

    /**
     * Alters the index with given {@literal name}.
     *
     * @param name name of the index to alter.
     * @param options index options to apply.
     * @since 4.1
     */
    Mono<Void> alterIndex(String name, IndexOptions options);

    /**
     * Drops an index from this collection.
     *

@@ -1,54 +0,0 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
|
||||
/**
|
||||
* Annotation to declare index hints for repository query, update and aggregate operations. The index is specified by
|
||||
* its name.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 4.1
|
||||
*/
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
|
||||
@Documented
|
||||
public @interface Hint {
|
||||
|
||||
/**
|
||||
* The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial
|
||||
* collection or view.
|
||||
*
|
||||
* @return the index name.
|
||||
*/
|
||||
String value() default "";
|
||||
|
||||
/**
|
||||
* The name of the index to use. In case of an {@literal aggregation} the index is evaluated against the initial
|
||||
* collection or view.
|
||||
*
|
||||
* @return the index name.
|
||||
*/
|
||||
@AliasFor("value")
|
||||
String indexName() default "";
|
||||
}
|
||||
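The deleted annotation was applied to repository query methods. A sketch of its intended use, mirroring the javadoc of the Query annotation below (repository, entity, and index name are illustrative, not taken from this diff):

    import java.util.List;
    import org.springframework.data.repository.CrudRepository;

    public interface PersonRepository extends CrudRepository<Person, String> {

        // asks the server to use the "lastname-idx" index for this derived query
        @Hint("lastname-idx")
        List<Person> findByLastname(String lastname);
    }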
@@ -39,14 +39,13 @@ import org.springframework.data.mongodb.core.annotation.Collation;
@Target({ ElementType.METHOD, ElementType.ANNOTATION_TYPE })
@Documented
@QueryAnnotation
@Hint
public @interface Query {

	/**
	 * Takes a MongoDB JSON string to define the actual query to be executed. This one will take precedence over the
	 * method name then.
	 *
	 * @return empty {@link String} by default.
	 * @return empty {@link String} by default.
	 */
	String value() default "";

@@ -54,7 +53,7 @@ public @interface Query {
	 * Defines the fields that should be returned for the given query. Note that only these fields will make it into the
	 * domain object returned.
	 *
	 * @return empty {@link String} by default.
	 * @return empty {@link String} by default.
	 */
	String fields() default "";

@@ -130,21 +129,4 @@ public @interface Query {
	 */
	@AliasFor(annotation = Collation.class, attribute = "value")
	String collation() default "";

	/**
	 * The name of the index to use. <br />
	 * {@code @Query(value = "...", hint = "lastname-idx")} can be used as shortcut for:
	 *
	 * <pre class="code">
	 * @Query(...)
	 * @Hint("lastname-idx")
	 * List<User> findAllByLastname(String lastname);
	 * </pre>
	 *
	 * @return the index name.
	 * @since 4.1
	 * @see Hint#indexName()
	 */
	@AliasFor(annotation = Hint.class, attribute = "indexName")
	String hint() default "";
}

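Equivalently, via the hint attribute described in the javadoc above; a sketch with an illustrative filter and index name:

    // shortcut for combining @Query with @Hint on the same repository method
    @Query(value = "{ 'lastname' : ?0 }", hint = "lastname-idx")
    List<User> findAllByLastname(String lastname);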
@@ -127,7 +127,6 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
	 * @param accessor for providing invocation arguments. Never {@literal null}.
	 * @param typeToRead the desired component target type. Can be {@literal null}.
	 */
	@Nullable
	protected Object doExecute(MongoQueryMethod method, ResultProcessor processor, ConvertingParameterAccessor accessor,
			@Nullable Class<?> typeToRead) {

@@ -136,7 +135,6 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
		applyQueryMetaAttributesWhenPresent(query);
		query = applyAnnotatedDefaultSortIfPresent(query);
		query = applyAnnotatedCollationIfPresent(query, accessor);
		query = applyHintIfPresent(query);

		FindWithQuery<?> find = typeToRead == null //
				? executableFind //
@@ -227,22 +225,6 @@ public abstract class AbstractMongoQuery implements RepositoryQuery {
				accessor, getQueryMethod().getParameters(), expressionParser, evaluationContextProvider);
	}

	/**
	 * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation.
	 *
	 * @param query must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 4.1
	 */
	Query applyHintIfPresent(Query query) {

		if (!method.hasAnnotatedHint()) {
			return query;
		}

		return query.withHint(method.getAnnotatedHint());
	}

	/**
	 * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to
	 * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be

@@ -160,7 +160,6 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {
		applyQueryMetaAttributesWhenPresent(query);
		query = applyAnnotatedDefaultSortIfPresent(query);
		query = applyAnnotatedCollationIfPresent(query, accessor);
		query = applyHintIfPresent(query);

		FindWithQuery<?> find = typeToRead == null //
				? findOperationWithProjection //
@@ -270,22 +269,6 @@ public abstract class AbstractReactiveMongoQuery implements RepositoryQuery {
				accessor, getQueryMethod().getParameters(), expressionParser, evaluationContextProvider);
	}

	/**
	 * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation.
	 *
	 * @param query must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 4.1
	 */
	Query applyHintIfPresent(Query query) {

		if (!method.hasAnnotatedHint()) {
			return query;
		}

		return query.withHint(method.getAnnotatedHint());
	}

	/**
	 * Creates a {@link Query} instance using the given {@link ConvertingParameterAccessor}. Will delegate to
	 * {@link #createQuery(ConvertingParameterAccessor)} by default but allows customization of the count query to be

@@ -27,7 +27,6 @@ import org.springframework.data.domain.Sort.Order;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Meta;
@@ -103,22 +102,6 @@ abstract class AggregationUtils {
		return builder;
	}

	/**
	 * If present apply the hint from the {@link org.springframework.data.mongodb.repository.Hint} annotation.
	 *
	 * @param builder must not be {@literal null}.
	 * @return never {@literal null}.
	 * @since 4.1
	 */
	static AggregationOptions.Builder applyHint(AggregationOptions.Builder builder, MongoQueryMethod queryMethod) {

		if (!queryMethod.hasAnnotatedHint()) {
			return builder;
		}

		return builder.hint(queryMethod.getAnnotatedHint());
	}

	/**
	 * Append {@code $sort} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is present.
	 *
@@ -126,7 +109,7 @@ abstract class AggregationUtils {
	 * @param accessor
	 * @param targetType
	 */
	static void appendSortIfPresent(AggregationPipeline aggregationPipeline, ConvertingParameterAccessor accessor,
	static void appendSortIfPresent(List<AggregationOperation> aggregationPipeline, ConvertingParameterAccessor accessor,
			Class<?> targetType) {

		if (accessor.getSort().isUnsorted()) {
@@ -151,7 +134,7 @@ abstract class AggregationUtils {
	 * @param aggregationPipeline
	 * @param accessor
	 */
	static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline,
	static void appendLimitAndOffsetIfPresent(List<AggregationOperation> aggregationPipeline,
			ConvertingParameterAccessor accessor) {
		appendLimitAndOffsetIfPresent(aggregationPipeline, accessor, LongUnaryOperator.identity(),
				IntUnaryOperator.identity());
@@ -167,7 +150,7 @@ abstract class AggregationUtils {
	 * @param limitOperator
	 * @since 3.3
	 */
	static void appendLimitAndOffsetIfPresent(AggregationPipeline aggregationPipeline,
	static void appendLimitAndOffsetIfPresent(List<AggregationOperation> aggregationPipeline,
			ConvertingParameterAccessor accessor, LongUnaryOperator offsetOperator, IntUnaryOperator limitOperator) {

		Pageable pageable = accessor.getPageable();

@@ -38,7 +38,6 @@ import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.support.PageableExecutionUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

@@ -56,7 +55,6 @@ import com.mongodb.client.result.DeleteResult;
@FunctionalInterface
interface MongoQueryExecution {

	@Nullable
	Object execute(Query query);

	/**
@@ -293,6 +291,7 @@ interface MongoQueryExecution {
	final class UpdateExecution implements MongoQueryExecution {

		private final ExecutableUpdate<?> updateOps;
		private final MongoQueryMethod method;
		private Supplier<UpdateDefinition> updateDefinitionSupplier;
		private final MongoParameterAccessor accessor;

@@ -300,6 +299,7 @@ interface MongoQueryExecution {
				MongoParameterAccessor accessor) {

			this.updateOps = updateOps;
			this.method = method;
			this.updateDefinitionSupplier = updateSupplier;
			this.accessor = accessor;
		}

@@ -33,7 +33,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.repository.Aggregation;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.mongodb.repository.Tailable;
@@ -363,26 +362,6 @@ public class MongoQueryMethod extends QueryMethod {
				"Expected to find @Aggregation annotation but did not; Make sure to check hasAnnotatedAggregation() before."));
	}

	/**
	 * @return {@literal true} if the {@link Hint} annotation is present and the index name is not empty.
	 * @since 4.1
	 */
	public boolean hasAnnotatedHint() {
		return doFindAnnotation(Hint.class).map(Hint::indexName).filter(StringUtils::hasText).isPresent();
	}

	/**
	 * Returns the index name declared via the {@link Hint} annotation.
	 *
	 * @return the index name (might be empty).
	 * @throws IllegalStateException if the method is not annotated with {@link Hint}
	 * @since 4.1
	 */
	public String getAnnotatedHint() {
		return doFindAnnotation(Hint.class).map(Hint::indexName).orElseThrow(() -> new IllegalStateException(
				"Expected to find @Hint annotation but did not; Make sure to check hasAnnotatedHint() before."));
	}

	private Optional<String[]> findAnnotatedAggregation() {

		return lookupAggregationAnnotation() //

@@ -61,7 +61,7 @@ interface ReactiveMongoQueryExecution {
	 *
	 * @author Mark Paluch
	 */
	final class GeoNearExecution implements ReactiveMongoQueryExecution {
	class GeoNearExecution implements ReactiveMongoQueryExecution {

		private final ReactiveMongoOperations operations;
		private final MongoParameterAccessor accessor;
@@ -83,7 +83,7 @@ interface ReactiveMongoQueryExecution {
		}

		@SuppressWarnings({ "unchecked", "rawtypes" })
		private Flux<GeoResult<Object>> doExecuteQuery(@Nullable Query query, Class<?> type, String collection) {
		protected Flux<GeoResult<Object>> doExecuteQuery(@Nullable Query query, Class<?> type, String collection) {

			Point nearLocation = accessor.getGeoNearLocation();
			NearQuery nearQuery = NearQuery.near(nearLocation);
@@ -154,6 +154,7 @@ interface ReactiveMongoQueryExecution {
	final class UpdateExecution implements ReactiveMongoQueryExecution {

		private final ReactiveUpdate<?> updateOps;
		private final MongoQueryMethod method;
		private final MongoParameterAccessor accessor;
		private Mono<UpdateDefinition> update;

@@ -161,6 +162,7 @@ interface ReactiveMongoQueryExecution {
				Mono<UpdateDefinition> update) {

			this.updateOps = updateOps;
			this.method = method;
			this.accessor = accessor;
			this.update = update;
		}

@@ -26,15 +26,12 @@ import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.util.ReflectionUtils;
import org.springframework.data.util.TypeInformation;
import org.springframework.expression.ExpressionParser;
import org.springframework.util.ClassUtils;

@@ -71,6 +68,10 @@ public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery {
		this.evaluationContextProvider = evaluationContextProvider;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#doExecute(org.springframework.data.mongodb.repository.query.ReactiveMongoQueryMethod, org.springframework.data.repository.query.ResultProcessor, org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, java.lang.Class)
	 */
	@Override
	protected Publisher<Object> doExecute(ReactiveMongoQueryMethod method, ResultProcessor processor,
			ConvertingParameterAccessor accessor, Class<?> typeToRead) {
@@ -80,7 +81,7 @@ public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery {
		Class<?> sourceType = method.getDomainClass();
		Class<?> targetType = typeToRead;

		AggregationPipeline pipeline = new AggregationPipeline(it);
		List<AggregationOperation> pipeline = it;

		AggregationUtils.appendSortIfPresent(pipeline, accessor, typeToRead);
		AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor);
@@ -92,13 +93,10 @@ public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery {
			targetType = Document.class;
		}

		AggregationOptions options = computeOptions(method, accessor, pipeline);
		TypedAggregation<?> aggregation = new TypedAggregation<>(sourceType, pipeline.getOperations(), options);
		AggregationOptions options = computeOptions(method, accessor);
		TypedAggregation<?> aggregation = new TypedAggregation<>(sourceType, pipeline, options);

		Flux<?> flux = reactiveMongoOperations.aggregate(aggregation, targetType);
		if (ReflectionUtils.isVoid(typeToRead)) {
			return flux.then();
		}

		if (isSimpleReturnType && !isRawReturnType) {
			flux = flux.handle((item, sink) -> {
@@ -123,47 +121,57 @@ public class ReactiveStringBasedAggregation extends AbstractReactiveMongoQuery {
		return parseAggregationPipeline(getQueryMethod().getAnnotatedAggregation(), accessor);
	}

	private AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor,
			AggregationPipeline pipeline) {
	private AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor) {

		AggregationOptions.Builder builder = Aggregation.newAggregationOptions();

		AggregationUtils.applyCollation(builder, method.getAnnotatedCollation(), accessor, method.getParameters(),
				expressionParser, evaluationContextProvider);
		AggregationUtils.applyMeta(builder, method);
		AggregationUtils.applyHint(builder, method);

		TypeInformation<?> returnType = method.getReturnType();
		if (returnType.getComponentType() != null) {
			returnType = returnType.getRequiredComponentType();
		}
		if (ReflectionUtils.isVoid(returnType.getType()) && pipeline.isOutOrMerge()) {
			builder.skipOutput();
		}

		return builder.build();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor)
	 */
	@Override
	protected Mono<Query> createQuery(ConvertingParameterAccessor accessor) {
		throw new UnsupportedOperationException("No query support for aggregation");
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isCountQuery()
	 */
	@Override
	protected boolean isCountQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isExistsQuery()
	 */
	@Override
	protected boolean isExistsQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isDeleteQuery()
	 */
	@Override
	protected boolean isDeleteQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#isLimiting()
	 */
	@Override
	protected boolean isLimiting() {
		return false;

@@ -21,13 +21,14 @@ import java.util.function.LongUnaryOperator;
import java.util.stream.Stream;

import org.bson.Document;

import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.SliceImpl;
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MongoConverter;
@@ -35,9 +36,7 @@ import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
import org.springframework.data.repository.query.ResultProcessor;
import org.springframework.data.util.ReflectionUtils;
import org.springframework.expression.ExpressionParser;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

/**
@@ -61,8 +60,8 @@ public class StringBasedAggregation extends AbstractMongoQuery {
	 *
	 * @param method must not be {@literal null}.
	 * @param mongoOperations must not be {@literal null}.
	 * @param expressionParser must not be {@literal null}.
	 * @param evaluationContextProvider must not be {@literal null}.
	 * @param expressionParser
	 * @param evaluationContextProvider
	 */
	public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOperations,
			ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) {
@@ -80,15 +79,18 @@ public class StringBasedAggregation extends AbstractMongoQuery {
		this.evaluationContextProvider = evaluationContextProvider;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractReactiveMongoQuery#doExecute(org.springframework.data.mongodb.repository.query.MongoQueryMethod, org.springframework.data.repository.query.ResultProcessor, org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor, java.lang.Class)
	 */
	@Override
	@Nullable
	protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProcessor,
			ConvertingParameterAccessor accessor, Class<?> typeToRead) {

		Class<?> sourceType = method.getDomainClass();
		Class<?> targetType = typeToRead;

		AggregationPipeline pipeline = computePipeline(method, accessor);
		List<AggregationOperation> pipeline = computePipeline(method, accessor);
		AggregationUtils.appendSortIfPresent(pipeline, accessor, typeToRead);

		if (method.isSliceQuery()) {
@@ -109,8 +111,8 @@ public class StringBasedAggregation extends AbstractMongoQuery {
			targetType = method.getReturnType().getRequiredActualType().getRequiredComponentType().getType();
		}

		AggregationOptions options = computeOptions(method, accessor, pipeline);
		TypedAggregation<?> aggregation = new TypedAggregation<>(sourceType, pipeline.getOperations(), options);
		AggregationOptions options = computeOptions(method, accessor);
		TypedAggregation<?> aggregation = new TypedAggregation<>(sourceType, pipeline, options);

		if (method.isStreamQuery()) {

@@ -124,9 +126,6 @@ public class StringBasedAggregation extends AbstractMongoQuery {
		}

		AggregationResults<Object> result = (AggregationResults<Object>) mongoOperations.aggregate(aggregation, targetType);
		if (ReflectionUtils.isVoid(typeToRead)) {
			return null;
		}

		if (isRawAggregationResult) {
			return result;
@@ -168,47 +167,61 @@ public class StringBasedAggregation extends AbstractMongoQuery {
		return MongoSimpleTypes.HOLDER.isSimpleType(targetType);
	}

	AggregationPipeline computePipeline(MongoQueryMethod method, ConvertingParameterAccessor accessor) {
		return new AggregationPipeline(parseAggregationPipeline(method.getAnnotatedAggregation(), accessor));
	List<AggregationOperation> computePipeline(MongoQueryMethod method, ConvertingParameterAccessor accessor) {
		return parseAggregationPipeline(method.getAnnotatedAggregation(), accessor);
	}

	private AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor,
			AggregationPipeline pipeline) {
	private AggregationOptions computeOptions(MongoQueryMethod method, ConvertingParameterAccessor accessor) {

		AggregationOptions.Builder builder = Aggregation.newAggregationOptions();

		AggregationUtils.applyCollation(builder, method.getAnnotatedCollation(), accessor, method.getParameters(),
				expressionParser, evaluationContextProvider);
		AggregationUtils.applyMeta(builder, method);
		AggregationUtils.applyHint(builder, method);

		if (ReflectionUtils.isVoid(method.getReturnType().getType()) && pipeline.isOutOrMerge()) {
			builder.skipOutput();
		}

		return builder.build();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#createQuery(org.springframework.data.mongodb.repository.query.ConvertingParameterAccessor)
	 */
	@Override
	protected Query createQuery(ConvertingParameterAccessor accessor) {
		throw new UnsupportedOperationException("No query support for aggregation");
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isCountQuery()
	 */
	@Override
	protected boolean isCountQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isExistsQuery()
	 */
	@Override
	protected boolean isExistsQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isDeleteQuery()
	 */
	@Override
	protected boolean isDeleteQuery() {
		return false;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.repository.query.AbstractMongoQuery#isLimiting()
	 */
	@Override
	protected boolean isLimiting() {
		return false;

@@ -17,14 +17,19 @@ package org.springframework.data.mongodb.util.json;

import static java.time.format.DateTimeFormatter.*;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalQuery;
import java.util.Calendar;
import java.util.TimeZone;

/**
 * DateTimeFormatter implementation borrowed from <a href=
 * JsonBuffer implementation borrowed from <a href=
 * "https://github.com/mongodb/mongo-java-driver/blob/master/bson/src/main/org/bson/json/DateTimeFormatter.java">MongoDB
 * Inc.</a> licensed under the Apache License, Version 2.0. <br />
 * Formatted and modified.
@@ -35,22 +40,133 @@ import java.time.ZonedDateTime;
 */
class DateTimeFormatter {

	private static final int DATE_STRING_LENGTH = "1970-01-01".length();
	private static final FormatterImpl FORMATTER_IMPL;

	static long parse(final String dateTimeString) {
		// ISO_OFFSET_DATE_TIME will not parse date strings consisting of just year-month-day, so use ISO_LOCAL_DATE for
		// those
		if (dateTimeString.length() == DATE_STRING_LENGTH) {
			return LocalDate.parse(dateTimeString, ISO_LOCAL_DATE).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli();
		} else {
			return ISO_OFFSET_DATE_TIME.parse(dateTimeString, Instant::from).toEpochMilli();
	static {
		FormatterImpl dateTimeHelper;
		try {
			dateTimeHelper = loadDateTimeFormatter(
					"org.springframework.data.mongodb.util.json.DateTimeFormatter$Java8DateTimeFormatter");
		} catch (LinkageError e) {
			// this is expected if running on a release prior to Java 8: fallback to JAXB.
			dateTimeHelper = loadDateTimeFormatter(
					"org.springframework.data.mongodb.util.json.DateTimeFormatter$JaxbDateTimeFormatter");
		}

		FORMATTER_IMPL = dateTimeHelper;
	}

	private static FormatterImpl loadDateTimeFormatter(final String className) {

		try {
			return (FormatterImpl) Class.forName(className).getDeclaredConstructor().newInstance();
		} catch (ClassNotFoundException e) {
			// this is unexpected as it means the class itself is not found
			throw new ExceptionInInitializerError(e);
		} catch (InstantiationException e) {
			// this is unexpected as it means the class can't be instantiated
			throw new ExceptionInInitializerError(e);
		} catch (IllegalAccessException e) {
			// this is unexpected as it means the no-args constructor isn't accessible
			throw new ExceptionInInitializerError(e);
		} catch (NoSuchMethodException e) {
			throw new ExceptionInInitializerError(e);
		} catch (InvocationTargetException e) {
			throw new ExceptionInInitializerError(e);
		}
	}

	static String format(final long dateTime) {
		return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME);
	static long parse(final String dateTimeString) {
		return FORMATTER_IMPL.parse(dateTimeString);
	}

	private DateTimeFormatter() {
	static String format(final long dateTime) {
		return FORMATTER_IMPL.format(dateTime);
	}

	private interface FormatterImpl {
		long parse(String dateTimeString);

		String format(long dateTime);
	}

	// Reflective use of DatatypeConverter avoids a compile-time dependency on the java.xml.bind module in Java 9
	static class JaxbDateTimeFormatter implements FormatterImpl {

		private static final Method DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD;
		private static final Method DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD;

		static {
			try {
				DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD = Class.forName("jakarta.xml.bind.DatatypeConverter")
						.getDeclaredMethod("parseDateTime", String.class);
				DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD = Class.forName("jakarta.xml.bind.DatatypeConverter")
						.getDeclaredMethod("printDateTime", Calendar.class);
			} catch (NoSuchMethodException e) {
				throw new ExceptionInInitializerError(e);
			} catch (ClassNotFoundException e) {
				throw new ExceptionInInitializerError(e);
			}
		}

		@Override
		public long parse(final String dateTimeString) {
			try {
				return ((Calendar) DATATYPE_CONVERTER_PARSE_DATE_TIME_METHOD.invoke(null, dateTimeString)).getTimeInMillis();
			} catch (IllegalAccessException e) {
				throw new IllegalStateException(e);
			} catch (InvocationTargetException e) {
				throw (RuntimeException) e.getCause();
			}
		}

		@Override
		public String format(final long dateTime) {
			Calendar calendar = Calendar.getInstance();
			calendar.setTimeInMillis(dateTime);
			calendar.setTimeZone(TimeZone.getTimeZone("Z"));
			try {
				return (String) DATATYPE_CONVERTER_PRINT_DATE_TIME_METHOD.invoke(null, calendar);
			} catch (IllegalAccessException e) {
				throw new IllegalStateException(e);
			} catch (InvocationTargetException e) {
				throw (RuntimeException) e.getCause();
			}
		}
	}

	static class Java8DateTimeFormatter implements FormatterImpl {

		// if running on Java 8 or above then java.time.format.DateTimeFormatter will be available and initialization will
		// succeed.
		// Otherwise it will fail.
		static {
			try {
				Class.forName("java.time.format.DateTimeFormatter");
			} catch (ClassNotFoundException e) {
				throw new ExceptionInInitializerError(e);
			}
		}

		@Override
		public long parse(final String dateTimeString) {
			try {
				return ISO_OFFSET_DATE_TIME.parse(dateTimeString, new TemporalQuery<Instant>() {
					@Override
					public Instant queryFrom(final TemporalAccessor temporal) {
						return Instant.from(temporal);
					}
				}).toEpochMilli();
			} catch (DateTimeParseException e) {
				throw new IllegalArgumentException(e.getMessage());
			}
		}

		@Override
		public String format(final long dateTime) {
			return ZonedDateTime.ofInstant(Instant.ofEpochMilli(dateTime), ZoneId.of("Z")).format(ISO_OFFSET_DATE_TIME);
		}
	}

	private DateTimeFormatter() {}
}

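Both strategies implement the same parse/format contract. Assuming the class keeps the semantics of the driver original it was borrowed from, a round trip looks like this (values are illustrative):

    long millis = DateTimeFormatter.parse("2023-04-06T16:16:28Z"); // epoch millis, offset-aware
    String text = DateTimeFormatter.format(millis);                // "2023-04-06T16:16:28Z" again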
@@ -20,8 +20,6 @@ import static java.lang.String.*;
import java.text.DateFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.time.format.DateTimeParseException;
import java.util.Base64;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
@@ -43,6 +41,7 @@ import org.springframework.data.spel.EvaluationContextProvider;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.lang.Nullable;
import org.springframework.util.Base64Utils;
import org.springframework.util.ClassUtils;
import org.springframework.util.NumberUtils;
import org.springframework.util.ObjectUtils;
@@ -290,9 +289,10 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		} else if ("DBPointer".equals(value)) {
			setCurrentBsonType(BsonType.DB_POINTER);
			currentValue = visitDBPointerConstructor();
		} else if ("UUID".equals(value)) {
		} else if ("UUID".equals(value) || "GUID".equals(value) || "CSUUID".equals(value) || "CSGUID".equals(value)
				|| "JUUID".equals(value) || "JGUID".equals(value) || "PYUUID".equals(value) || "PYGUID".equals(value)) {
			setCurrentBsonType(BsonType.BINARY);
			currentValue = visitUUIDConstructor();
			currentValue = visitUUIDConstructor(value);
		} else if ("new".equals(value)) {
			visitNew();
		} else {
@@ -840,8 +840,9 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		} else if ("DBPointer".equals(value)) {
			currentValue = visitDBPointerConstructor();
			setCurrentBsonType(BsonType.DB_POINTER);
		} else if ("UUID".equals(value)) {
			currentValue = visitUUIDConstructor();
		} else if ("UUID".equals(value) || "GUID".equals(value) || "CSUUID".equals(value) || "CSGUID".equals(value)
				|| "JUUID".equals(value) || "JGUID".equals(value) || "PYUUID".equals(value) || "PYGUID".equals(value)) {
			currentValue = visitUUIDConstructor(value);
			setCurrentBsonType(BsonType.BINARY);
		} else {
			throw new JsonParseException("JSON reader expected a type name but found '%s'.", value);
@@ -861,13 +862,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
				setCurrentBsonType(BsonType.BINARY);
				return;
			}
		}
		if ("$uuid".equals(value)) {
			currentValue = visitUuidExtendedJson();
			setCurrentBsonType(BsonType.BINARY);
			return;
		}
		else if ("$regex".equals(value) || "$options".equals(value)) {
		} else if ("$regex".equals(value) || "$options".equals(value)) {
			currentValue = visitRegularExpressionExtendedJson(value);
			if (currentValue != null) {
				setCurrentBsonType(BsonType.REGULAR_EXPRESSION);
@@ -957,16 +952,20 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		}
		verifyToken(JsonTokenType.RIGHT_PAREN);

		byte[] bytes = Base64.getDecoder().decode(bytesToken.getValue(String.class));
		byte[] bytes = Base64Utils.decodeFromString(bytesToken.getValue(String.class));
		return new BsonBinary(subTypeToken.getValue(Integer.class).byteValue(), bytes);
	}

	private BsonBinary visitUUIDConstructor() {
		this.verifyToken(JsonTokenType.LEFT_PAREN);
		String hexString = this.readStringFromExtendedJson().replace("-", "");

		this.verifyToken(JsonTokenType.RIGHT_PAREN);
		return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString));
	private BsonBinary visitUUIDConstructor(final String uuidConstructorName) {
		verifyToken(JsonTokenType.LEFT_PAREN);
		String hexString = readStringFromExtendedJson().replaceAll("\\{", "").replaceAll("}", "").replaceAll("-", "");
		verifyToken(JsonTokenType.RIGHT_PAREN);
		byte[] bytes = decodeHex(hexString);
		BsonBinarySubType subType = BsonBinarySubType.UUID_STANDARD;
		if (!"UUID".equals(uuidConstructorName) && !"GUID".equals(uuidConstructorName)) {
			subType = BsonBinarySubType.UUID_LEGACY;
		}
		return new BsonBinary(subType, bytes);
	}

	private BsonRegularExpression visitRegularExpressionConstructor() {
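The widened branch accepts every UUID constructor spelling the MongoDB shells emit; going by the subtype logic above, only the plain UUID and GUID forms map to the standard subtype, all driver-specific spellings map to the legacy one. Sample values are hypothetical:

    // UUID("870edcdc-...") or GUID("{870edcdc-...}")            -> BsonBinarySubType.UUID_STANDARD (subtype 4)
    // JUUID / JGUID / CSUUID / CSGUID / PYUUID / PYGUID("...")  -> BsonBinarySubType.UUID_LEGACY   (subtype 3)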
@@ -1080,14 +1079,28 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		}

		verifyToken(JsonTokenType.RIGHT_PAREN);

		String dateTimeString = token.getValue(String.class);
		String[] patterns = { "yyyy-MM-dd", "yyyy-MM-dd'T'HH:mm:ssz", "yyyy-MM-dd'T'HH:mm:ss.SSSz" };

		try {
			return DateTimeFormatter.parse(dateTimeString);
		} catch (DateTimeParseException e) {
			throw new JsonParseException("Failed to parse string as a date: " + dateTimeString, e);
		SimpleDateFormat format = new SimpleDateFormat(patterns[0], Locale.ENGLISH);
		ParsePosition pos = new ParsePosition(0);
		String s = token.getValue(String.class);

		if (s.endsWith("Z")) {
			s = s.substring(0, s.length() - 1) + "GMT-00:00";
		}

		for (final String pattern : patterns) {
			format.applyPattern(pattern);
			format.setLenient(true);
			pos.setIndex(0);

			Date date = format.parse(s, pos);

			if (date != null && pos.getIndex() == s.length()) {
				return date.getTime();
			}
		}
		throw new JsonParseException("Invalid date format.");
	}

	private BsonBinary visitHexDataConstructor() {
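The replacement trades DateTimeFormatter for a lenient SimpleDateFormat loop: a string must fully match one of the three patterns, and a trailing Z is first rewritten to an explicit GMT offset. Illustrative inputs (not taken from this diff):

    // "2023-01-01"                    -> matched by yyyy-MM-dd
    // "2023-01-01T10:15:30GMT+01:00"  -> matched by yyyy-MM-dd'T'HH:mm:ssz
    // "2023-01-01T10:15:30.123Z"      -> "Z" becomes "GMT-00:00", then yyyy-MM-dd'T'HH:mm:ss.SSSz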
@@ -1205,7 +1218,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		byte type;
		if (firstNestedKey.equals("base64")) {
			verifyToken(JsonTokenType.COLON);
			data = Base64.getDecoder().decode(readStringFromExtendedJson());
			data = Base64Utils.decodeFromString(readStringFromExtendedJson());
			verifyToken(JsonTokenType.COMMA);
			verifyString("subType");
			verifyToken(JsonTokenType.COLON);
@@ -1216,7 +1229,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
			verifyToken(JsonTokenType.COMMA);
			verifyString("base64");
			verifyToken(JsonTokenType.COLON);
			data = Base64.getDecoder().decode(readStringFromExtendedJson());
			data = Base64Utils.decodeFromString(readStringFromExtendedJson());
		} else {
			throw new JsonParseException("Unexpected key for $binary: " + firstNestedKey);
		}
@@ -1244,7 +1257,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		byte type;

		if (firstKey.equals("$binary")) {
			data = Base64.getDecoder().decode(readStringFromExtendedJson());
			data = Base64Utils.decodeFromString(readStringFromExtendedJson());
			verifyToken(JsonTokenType.COMMA);
			verifyString("$type");
			verifyToken(JsonTokenType.COLON);
@@ -1254,7 +1267,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
			verifyToken(JsonTokenType.COMMA);
			verifyString("$binary");
			verifyToken(JsonTokenType.COLON);
			data = Base64.getDecoder().decode(readStringFromExtendedJson());
			data = Base64Utils.decodeFromString(readStringFromExtendedJson());
		}
		verifyToken(JsonTokenType.END_OBJECT);

@@ -1412,8 +1425,7 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		// Spring Data Customization START

		if (patternToken.getType() == JsonTokenType.STRING || patternToken.getType() == JsonTokenType.UNQUOTED_STRING) {
			Object value = bindableValueFor(patternToken).getValue();
			return value != null ? value.toString() : null;
			return bindableValueFor(patternToken).getValue().toString();
		}

		throw new JsonParseException("JSON reader expected a string but found '%s'.", patternToken.getValue());
@@ -1472,17 +1484,6 @@ public class ParameterBindingJsonReader extends AbstractBsonReader {
		return value;
	}

	private BsonBinary visitUuidExtendedJson() {
		verifyToken(JsonTokenType.COLON);
		String hexString = this.readStringFromExtendedJson().replace("-", "");
		verifyToken(JsonTokenType.END_OBJECT);
		try {
			return new BsonBinary(BsonBinarySubType.UUID_STANDARD, decodeHex(hexString));
		} catch (IllegalArgumentException e) {
			throw new JsonParseException(e);
		}
	}

	private void visitJavaScriptExtendedJson() {
		verifyToken(JsonTokenType.COLON);
		String code = readStringFromExtendedJson();

@@ -1,64 +0,0 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.aot;

import static org.assertj.core.api.Assertions.*;

import java.util.List;

import org.junit.jupiter.api.Test;
import org.springframework.aot.generate.ClassNameGenerator;
import org.springframework.aot.generate.DefaultGenerationContext;
import org.springframework.aot.generate.GenerationContext;
import org.springframework.aot.generate.InMemoryGeneratedFiles;
import org.springframework.aot.hint.predicate.RuntimeHintsPredicates;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.javapoet.ClassName;

/**
 * Unit tests for {@link LazyLoadingProxyAotProcessor}.
 *
 * @author Christoph Strobl
 */
class LazyLoadingProxyAotProcessorUnitTests {

	@Test // GH-4351
	void registersProxyForLazyDbRefCorrectlyWhenTypeIsCollectionInterface() {

		GenerationContext ctx = new DefaultGenerationContext(new ClassNameGenerator(ClassName.get(this.getClass())),
				new InMemoryGeneratedFiles());

		new LazyLoadingProxyAotProcessor().registerLazyLoadingProxyIfNeeded(A.class, ctx);

		assertThat(ctx.getRuntimeHints())
				.satisfies(RuntimeHintsPredicates.proxies().forInterfaces(java.util.Collection.class,
						org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class, java.util.List.class,
						org.springframework.aop.SpringProxy.class, org.springframework.aop.framework.Advised.class,
						org.springframework.core.DecoratingProxy.class)::test);
	}

	static class A {

		String id;

		@DBRef(lazy = true) //
		List<B> listRef;
	}

	static class B {
		String id;
	}
}
@@ -21,26 +21,20 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.test.util.MongoTemplateExtension;
import org.springframework.data.mongodb.test.util.MongoTestTemplate;
import org.springframework.data.mongodb.test.util.Template;
@@ -48,7 +42,6 @@ import org.springframework.data.util.Pair;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;

/**
@@ -142,14 +135,13 @@ public class DefaultBulkOperationsIntegrationTests {
		});
	}

	@ParameterizedTest // DATAMONGO-934, GH-3872
	@MethodSource("upsertArguments")
	void upsertDoesUpdate(UpdateDefinition update) {
	@Test // DATAMONGO-934
	public void upsertDoesUpdate() {

		insertSomeDocuments();

		com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
				upsert(where("value", "value1"), update).//
				upsert(where("value", "value1"), set("value", "value2")).//
				execute();

		assertThat(result).isNotNull();
@@ -160,12 +152,11 @@ public class DefaultBulkOperationsIntegrationTests {
		assertThat(result.getUpserts().size()).isZero();
	}

	@ParameterizedTest // DATAMONGO-934, GH-3872
	@MethodSource("upsertArguments")
	void upsertDoesInsert(UpdateDefinition update) {
	@Test // DATAMONGO-934
	public void upsertDoesInsert() {

		com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED).//
				upsert(where("_id", "1"), update).//
				upsert(where("_id", "1"), set("value", "v1")).//
				execute();

		assertThat(result).isNotNull();
@@ -180,37 +171,11 @@ public class DefaultBulkOperationsIntegrationTests {
		testUpdate(BulkMode.ORDERED, false, 2);
	}

	@Test // GH-3872
	public void updateOneWithAggregation() {

		insertSomeDocuments();

		BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED);
		bulkOps.updateOne(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3"));
		BulkWriteResult result = bulkOps.execute();

		assertThat(result.getModifiedCount()).isEqualTo(1);
		assertThat(operations.<Long>execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isOne();
	}

	@Test // DATAMONGO-934
	public void updateMultiOrdered() {
		testUpdate(BulkMode.ORDERED, true, 4);
	}

	@Test // GH-3872
	public void updateMultiWithAggregation() {

		insertSomeDocuments();

		BulkOperations bulkOps = createBulkOps(BulkMode.ORDERED);
		bulkOps.updateMulti(where("value", "value1"), AggregationUpdate.update().set("value").toValue("value3"));
		BulkWriteResult result = bulkOps.execute();

		assertThat(result.getModifiedCount()).isEqualTo(2);
		assertThat(operations.<Long>execute(COLLECTION_NAME, collection -> collection.countDocuments(new org.bson.Document("value", "value3")))).isEqualTo(2);
	}

	@Test // DATAMONGO-934
	public void updateOneUnOrdered() {
		testUpdate(BulkMode.UNORDERED, false, 2);
@@ -295,7 +260,7 @@ public class DefaultBulkOperationsIntegrationTests {
	public void mixedBulkOrderedWithList() {

		List<BaseDoc> inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2"));
		List<Pair<Query, UpdateDefinition>> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3")));
		List<Pair<Query, Update>> updates = Arrays.asList(Pair.of(where("value", "v2"), set("value", "v3")));
		List<Query> removes = Arrays.asList(where("_id", "1"));

		com.mongodb.bulk.BulkWriteResult result = createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts)
@@ -329,7 +294,7 @@ public class DefaultBulkOperationsIntegrationTests {

		insertSomeDocuments();

		List<Pair<Query, UpdateDefinition>> updates = new ArrayList<>();
		List<Pair<Query, Update>> updates = new ArrayList<Pair<Query, Update>>();
		updates.add(Pair.of(where("value", "value1"), set("value", "value3")));
		updates.add(Pair.of(where("value", "value2"), set("value", "value4")));

@@ -390,10 +355,6 @@ public class DefaultBulkOperationsIntegrationTests {
		coll.insertOne(rawDoc("4", "value2"));
	}

	private static Stream<Arguments> upsertArguments() {
		return Stream.of(Arguments.of(set("value", "value2")), Arguments.of(AggregationUpdate.update().set("value").toValue("value2")));
	}

	private static BaseDoc newDoc(String id) {

		BaseDoc doc = new BaseDoc();

@@ -173,40 +173,6 @@ public class DefaultIndexOperationsIntegrationTests {
		assertThat(result).isEqualTo(expected);
	}

	@Test // GH-4348
	void indexShouldNotBeHiddenByDefault() {

		IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC);

		indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class);
		indexOps.ensureIndex(index);

		IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index");
		assertThat(info.isHidden()).isFalse();
	}

	@Test // GH-4348
	void shouldCreateHiddenIndex() {

		IndexDefinition index = new Index().named("my-hidden-index").on("a", Direction.ASC).hidden();

		indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class);
		indexOps.ensureIndex(index);

		IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-hidden-index");
		assertThat(info.isHidden()).isTrue();
	}

	@Test // GH-4348
	void alterIndexShouldAllowHiding() {

		collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index"));

		indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden());
		IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index");
		assertThat(info.isHidden()).isTrue();
	}

	private IndexInfo findAndReturnIndexInfo(org.bson.Document keys) {
		return findAndReturnIndexInfo(indexOps.getIndexInfo(), keys);
	}

@@ -1,348 +0,0 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.Assertions.*;

import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.test.util.MongoTemplateExtension;
import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate;
import org.springframework.data.mongodb.test.util.Template;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.WriteConcern;
import com.mongodb.bulk.BulkWriteResult;

/**
 * Tests for {@link DefaultReactiveBulkOperations}.
 *
 * @author Christoph Strobl
 */
@ExtendWith(MongoTemplateExtension.class)
class DefaultReactiveBulkOperationsTests {

	static final String COLLECTION_NAME = "reactive-bulk-ops";

	@Template(initialEntitySet = BaseDoc.class) static ReactiveMongoTestTemplate template;

	@BeforeEach
	public void setUp() {
		template.flush(COLLECTION_NAME).as(StepVerifier::create).verifyComplete();
	}

	@Test // GH-2821
	void insertOrdered() {

		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));

		createBulkOps(BulkMode.ORDERED).insert(documents) //
				.execute().as(StepVerifier::create) //
				.consumeNextWith(result -> {
					assertThat(result.getInsertedCount()).isEqualTo(2);
				}).verifyComplete();
	}

	@Test // GH-2821
	void insertOrderedFails() {

		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));

		createBulkOps(BulkMode.ORDERED).insert(documents) //
				.execute().as(StepVerifier::create) //
				.verifyErrorSatisfies(error -> {
					assertThat(error).isInstanceOf(DuplicateKeyException.class);
				});
	}

	@Test // GH-2821
	public void insertUnOrdered() {

		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("2"));

		createBulkOps(BulkMode.UNORDERED).insert(documents) //
				.execute().as(StepVerifier::create) //
				.consumeNextWith(result -> {
					assertThat(result.getInsertedCount()).isEqualTo(2);
				}).verifyComplete();
	}

	@Test // GH-2821
	public void insertUnOrderedContinuesOnError() {

		List<BaseDoc> documents = Arrays.asList(newDoc("1"), newDoc("1"), newDoc("2"));

		createBulkOps(BulkMode.UNORDERED).insert(documents) //
				.execute().as(StepVerifier::create) //
				.verifyErrorSatisfies(error -> {

					assertThat(error).isInstanceOf(DuplicateKeyException.class);
					assertThat(error.getCause()).isInstanceOf(MongoBulkWriteException.class);

					MongoBulkWriteException cause = (MongoBulkWriteException) error.getCause();
					assertThat(cause.getWriteResult().getInsertedCount()).isEqualTo(2);
					assertThat(cause.getWriteErrors()).isNotNull();
					assertThat(cause.getWriteErrors().size()).isOne();
				});
	}

	@Test // GH-2821
	void upsertDoesUpdate() {

		insertSomeDocuments();

		createBulkOps(BulkMode.ORDERED).//
				upsert(where("value", "value1"), set("value", "value2")).//
				execute().as(StepVerifier::create) //
				.consumeNextWith(result -> {
					assertThat(result).isNotNull();
					assertThat(result.getMatchedCount()).isEqualTo(2);
					assertThat(result.getModifiedCount()).isEqualTo(2);
					assertThat(result.getInsertedCount()).isZero();
					assertThat(result.getUpserts()).isNotNull();
					assertThat(result.getUpserts().size()).isZero();
				}) //
				.verifyComplete();
	}

	@Test // GH-2821
	public void upsertDoesInsert() {

		createBulkOps(BulkMode.ORDERED).//
				upsert(where("_id", "1"), set("value", "v1")).//
				execute().as(StepVerifier::create) //
				.consumeNextWith(result -> {

					assertThat(result).isNotNull();
					assertThat(result.getMatchedCount()).isZero();
					assertThat(result.getModifiedCount()).isZero();
					assertThat(result.getUpserts()).isNotNull();
					assertThat(result.getUpserts().size()).isOne();
				}) //
				.verifyComplete();
	}

	@ParameterizedTest // GH-2821
	@MethodSource
	public void testUpdates(BulkMode mode, boolean multi, int expectedUpdateCount) {

		insertSomeDocuments();
		ReactiveBulkOperations bulkOps = createBulkOps(mode);

		if (multi) {
			bulkOps.updateMulti(where("value", "value1"), set("value", "value3"));
			bulkOps.updateMulti(where("value", "value2"), set("value", "value4"));
		} else {
			bulkOps.updateOne(where("value", "value1"), set("value", "value3"));
			bulkOps.updateOne(where("value", "value2"), set("value", "value4"));
		}

		bulkOps.execute().map(BulkWriteResult::getModifiedCount) //
				.as(StepVerifier::create) //
				.expectNext(expectedUpdateCount) //
				.verifyComplete();
	}

	private static Stream<Arguments> testUpdates() {
		return Stream.of(Arguments.of(BulkMode.ORDERED, false, 2), Arguments.of(BulkMode.ORDERED, true, 4),
				Arguments.of(BulkMode.UNORDERED, false, 2), Arguments.of(BulkMode.UNORDERED, true, 4));
	}

	@ParameterizedTest // GH-2821
	@EnumSource(BulkMode.class)
	void testRemove(BulkMode mode) {

		insertSomeDocuments();

		List<Query> removes = Arrays.asList(where("_id", "1"), where("value", "value2"));

		createBulkOps(mode).remove(removes).execute().map(BulkWriteResult::getDeletedCount).as(StepVerifier::create)
				.expectNext(3).verifyComplete();
	}

	@ParameterizedTest // GH-2821
	@EnumSource(BulkMode.class)
	void testReplaceOne(BulkMode mode) {

		insertSomeDocuments();

		Query query = where("_id", "1");
		Document document = rawDoc("1", "value2");
		createBulkOps(mode).replaceOne(query, document).execute().map(BulkWriteResult::getModifiedCount)
				.as(StepVerifier::create).expectNext(1).verifyComplete();
	}

	@Test // GH-2821
	public void replaceOneDoesReplace() {

		insertSomeDocuments();

		createBulkOps(BulkMode.ORDERED).//
				replaceOne(where("_id", "1"), rawDoc("1", "value2")).//
				execute().as(StepVerifier::create).consumeNextWith(result -> {

					assertThat(result).isNotNull();
					assertThat(result.getMatchedCount()).isOne();
					assertThat(result.getModifiedCount()).isOne();
|
||||
assertThat(result.getInsertedCount()).isZero();
|
||||
}).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // GH-2821
|
||||
public void replaceOneWithUpsert() {
|
||||
|
||||
createBulkOps(BulkMode.ORDERED).//
|
||||
replaceOne(where("_id", "1"), rawDoc("1", "value2"), FindAndReplaceOptions.options().upsert()).//
|
||||
execute().as(StepVerifier::create).consumeNextWith(result -> {
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getMatchedCount()).isZero();
|
||||
assertThat(result.getInsertedCount()).isZero();
|
||||
assertThat(result.getModifiedCount()).isZero();
|
||||
assertThat(result.getUpserts().size()).isOne();
|
||||
});
|
||||
}
|
||||
|
||||
@Test // GH-2821
|
||||
public void mixedBulkOrdered() {
|
||||
|
||||
createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(newDoc("1", "v1")).//
|
||||
updateOne(where("_id", "1"), set("value", "v2")).//
|
||||
remove(where("value", "v2")).//
|
||||
execute().as(StepVerifier::create).consumeNextWith(result -> {
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getInsertedCount()).isOne();
|
||||
assertThat(result.getModifiedCount()).isOne();
|
||||
assertThat(result.getDeletedCount()).isOne();
|
||||
}).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // GH-2821
|
||||
public void mixedBulkOrderedWithList() {
|
||||
|
||||
List<BaseDoc> inserts = Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2"), newDoc("3", "v2"));
|
||||
List<Query> removes = Arrays.asList(where("_id", "1"));
|
||||
|
||||
createBulkOps(BulkMode.ORDERED, BaseDoc.class).insert(inserts).updateMulti(where("value", "v2"), set("value", "v3"))
|
||||
.remove(removes).execute().as(StepVerifier::create).consumeNextWith(result -> {
|
||||
|
||||
assertThat(result).isNotNull();
|
||||
assertThat(result.getInsertedCount()).isEqualTo(3);
|
||||
assertThat(result.getModifiedCount()).isEqualTo(2);
|
||||
assertThat(result.getDeletedCount()).isOne();
|
||||
}).verifyComplete();
|
||||
}
|
||||
|
||||
@Test // GH-2821
|
||||
public void insertShouldConsiderInheritance() {
|
||||
|
||||
SpecialDoc specialDoc = new SpecialDoc();
|
||||
specialDoc.id = "id-special";
|
||||
specialDoc.value = "normal-value";
|
||||
specialDoc.specialValue = "special-value";
|
||||
|
||||
createBulkOps(BulkMode.ORDERED, SpecialDoc.class).insert(Arrays.asList(specialDoc)).execute().then()
|
||||
.as(StepVerifier::create).verifyComplete();
|
||||
|
||||
template.findOne(where("_id", specialDoc.id), BaseDoc.class, COLLECTION_NAME).as(StepVerifier::create)
|
||||
.consumeNextWith(doc -> {
|
||||
|
||||
assertThat(doc).isNotNull();
|
||||
assertThat(doc).isInstanceOf(SpecialDoc.class);
|
||||
}).verifyComplete();
|
||||
}
|
||||
|
||||
private void insertSomeDocuments() {
|
||||
|
||||
template.execute(COLLECTION_NAME, collection -> {
|
||||
return Flux.from(collection.insertMany(
|
||||
List.of(rawDoc("1", "value1"), rawDoc("2", "value1"), rawDoc("3", "value2"), rawDoc("4", "value2"))));
|
||||
}).then().as(StepVerifier::create).verifyComplete();
|
||||
|
||||
}
|
||||
|
||||
private DefaultReactiveBulkOperations createBulkOps(BulkMode mode) {
|
||||
return createBulkOps(mode, null);
|
||||
}
|
||||
|
||||
private DefaultReactiveBulkOperations createBulkOps(BulkMode mode, Class<?> entityType) {
|
||||
|
||||
Optional<? extends MongoPersistentEntity<?>> entity = entityType != null
|
||||
? Optional.of(template.getConverter().getMappingContext().getPersistentEntity(entityType))
|
||||
: Optional.empty();
|
||||
|
||||
ReactiveBulkOperationContext bulkOperationContext = new ReactiveBulkOperationContext(mode, entity,
|
||||
new QueryMapper(template.getConverter()), new UpdateMapper(template.getConverter()), null, null);
|
||||
|
||||
DefaultReactiveBulkOperations bulkOps = new DefaultReactiveBulkOperations(template, COLLECTION_NAME,
|
||||
bulkOperationContext);
|
||||
bulkOps.setDefaultWriteConcern(WriteConcern.ACKNOWLEDGED);
|
||||
|
||||
return bulkOps;
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id) {
|
||||
|
||||
BaseDoc doc = new BaseDoc();
|
||||
doc.id = id;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static BaseDoc newDoc(String id, String value) {
|
||||
|
||||
BaseDoc doc = newDoc(id);
|
||||
doc.value = value;
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private static Query where(String field, String value) {
|
||||
return new Query().addCriteria(Criteria.where(field).is(value));
|
||||
}
|
||||
|
||||
private static Update set(String field, String value) {
|
||||
return new Update().set(field, value);
|
||||
}
|
||||
|
||||
private static Document rawDoc(String id, String value) {
|
||||
return new Document("_id", id).append("value", value);
|
||||
}
|
||||
}
|
||||
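For orientation, this is roughly how a caller would drive the operations exercised above through the template. A sketch only: it assumes the reactive template exposes a `bulkOps(...)` factory mirroring the imperative `MongoOperations` API, and it reuses the `newDoc`/`where`/`set` helpers from the test.

// hypothetical usage sketch, not part of the change set
template.bulkOps(BulkMode.ORDERED, BaseDoc.class)
        .insert(Arrays.asList(newDoc("1", "v1"), newDoc("2", "v2")))
        .updateOne(where("_id", "1"), set("value", "v1-updated"))
        .execute()                                  // Mono<BulkWriteResult>
        .map(BulkWriteResult::getModifiedCount)
        .subscribe(modified -> System.out.println("modified: " + modified));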
@@ -1,347 +0,0 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import static org.assertj.core.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
import static org.springframework.data.mongodb.core.query.Criteria.*;
import static org.springframework.data.mongodb.core.query.Query.*;

import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;

import java.util.List;
import java.util.Optional;

import org.bson.BsonDocument;
import org.bson.BsonString;
import org.bson.Document;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Answers;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.data.annotation.Id;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.DefaultBulkOperationsUnitTests.NullExceptionTranslator;
import org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.MongoWriteException;
import com.mongodb.WriteError;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.WriteModel;
import com.mongodb.reactivestreams.client.MongoCollection;
import com.mongodb.reactivestreams.client.MongoDatabase;

/**
 * @author Christoph Strobl
 */
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public class DefaultReactiveBulkOperationsUnitTests {

    ReactiveMongoTemplate template;
    @Mock ReactiveMongoDatabaseFactory factory;

    @Mock MongoDatabase database;
    @Mock(answer = Answers.RETURNS_DEEP_STUBS) MongoCollection<Document> collection;
    @Captor ArgumentCaptor<List<WriteModel<Document>>> captor;

    private MongoConverter converter;
    private MongoMappingContext mappingContext;

    private DefaultReactiveBulkOperations ops;

    @BeforeEach
    void setUp() {

        when(factory.getMongoDatabase()).thenReturn(Mono.just(database));
        when(factory.getExceptionTranslator()).thenReturn(new NullExceptionTranslator());
        when(database.getCollection(anyString(), eq(Document.class))).thenReturn(collection);
        when(collection.bulkWrite(anyList(), any())).thenReturn(Mono.just(mock(BulkWriteResult.class)));

        mappingContext = new MongoMappingContext();
        mappingContext.afterPropertiesSet();

        converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
        template = new ReactiveMongoTemplate(factory, converter);

        ops = new DefaultReactiveBulkOperations(template, "collection-1",
                new ReactiveBulkOperationContext(BulkMode.ORDERED,
                        Optional.of(mappingContext.getPersistentEntity(SomeDomainType.class)), new QueryMapper(converter),
                        new UpdateMapper(converter), null, null));
    }

    @Test // GH-2821
    void updateOneShouldUseCollationWhenPresent() {

        ops.updateOne(new BasicQuery("{}").collation(Collation.of("de")), new Update().set("lastName", "targaryen"))
                .execute().subscribe();

        verify(collection).bulkWrite(captor.capture(), any());
        assertThat(captor.getValue().get(0)).isInstanceOf(UpdateOneModel.class);
        assertThat(((UpdateOneModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
                .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
    }

    @Test // GH-2821
    void replaceOneShouldUseCollationWhenPresent() {

        ops.replaceOne(new BasicQuery("{}").collation(Collation.of("de")), new SomeDomainType()).execute().subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        assertThat(captor.getValue().get(0)).isInstanceOf(ReplaceOneModel.class);
        assertThat(((ReplaceOneModel<Document>) captor.getValue().get(0)).getReplaceOptions().getCollation())
                .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
    }

    @Test // GH-2821
    void removeShouldUseCollationWhenPresent() {

        ops.remove(new BasicQuery("{}").collation(Collation.of("de"))).execute().subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        assertThat(captor.getValue().get(0)).isInstanceOf(DeleteManyModel.class);
        assertThat(((DeleteManyModel<Document>) captor.getValue().get(0)).getOptions().getCollation())
                .isEqualTo(com.mongodb.client.model.Collation.builder().locale("de").build());
    }

    @Test // GH-2821
    void bulkUpdateShouldMapQueryAndUpdateCorrectly() {

        ops.updateOne(query(where("firstName").is("danerys")), Update.update("firstName", "queen danerys")).execute()
                .subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
        assertThat(updateModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
        assertThat(updateModel.getUpdate()).isEqualTo(new Document("$set", new Document("first_name", "queen danerys")));
    }

    @Test // GH-2821
    void bulkRemoveShouldMapQueryCorrectly() {

        ops.remove(query(where("firstName").is("danerys"))).execute().subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        DeleteManyModel<Document> deleteModel = (DeleteManyModel<Document>) captor.getValue().get(0);
        assertThat(deleteModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
    }

    @Test // GH-2821
    void bulkReplaceOneShouldMapQueryCorrectly() {

        SomeDomainType replacement = new SomeDomainType();
        replacement.firstName = "Minsu";
        replacement.lastName = "Kim";

        ops.replaceOne(query(where("firstName").is("danerys")), replacement).execute().subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        ReplaceOneModel<Document> replaceModel = (ReplaceOneModel<Document>) captor.getValue().get(0);
        assertThat(replaceModel.getFilter()).isEqualTo(new Document("first_name", "danerys"));
        assertThat(replaceModel.getReplacement().getString("first_name")).isEqualTo("Minsu");
        assertThat(replaceModel.getReplacement().getString("lastName")).isEqualTo("Kim");
    }

    @Test // GH-2821
    void bulkInsertInvokesEntityCallbacks() {

        BeforeConvertPersonCallback beforeConvertCallback = spy(new BeforeConvertPersonCallback());
        BeforeSavePersonCallback beforeSaveCallback = spy(new BeforeSavePersonCallback());
        AfterSavePersonCallback afterSaveCallback = spy(new AfterSavePersonCallback());

        ops = new DefaultReactiveBulkOperations(template, "collection-1",
                new ReactiveBulkOperationContext(BulkMode.ORDERED,
                        Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
                        new UpdateMapper(converter), null,
                        ReactiveEntityCallbacks.create(beforeConvertCallback, beforeSaveCallback, afterSaveCallback)));

        Person entity = new Person("init");
        ops.insert(entity);

        ArgumentCaptor<Person> personArgumentCaptor = ArgumentCaptor.forClass(Person.class);
        verifyNoInteractions(beforeConvertCallback);
        verifyNoInteractions(beforeSaveCallback);

        ops.execute().then().as(StepVerifier::create).verifyComplete();

        verify(beforeConvertCallback).onBeforeConvert(personArgumentCaptor.capture(), eq("collection-1"));
        verify(beforeSaveCallback).onBeforeSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
        verify(afterSaveCallback).onAfterSave(personArgumentCaptor.capture(), any(), eq("collection-1"));
        assertThat(personArgumentCaptor.getAllValues()).extracting("firstName").containsExactly("init", "before-convert",
                "before-save");
        verify(collection).bulkWrite(captor.capture(), any());

        InsertOneModel<Document> insertModel = (InsertOneModel<Document>) captor.getValue().get(0);
        assertThat(insertModel.getDocument()).containsEntry("firstName", "after-save");
    }

    @Test // GH-2821
    void bulkReplaceOneEmitsEventsCorrectly() {

        ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);

        ops = new DefaultReactiveBulkOperations(template, "collection-1",
                new ReactiveBulkOperationContext(BulkMode.ORDERED,
                        Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
                        new UpdateMapper(converter), eventPublisher, null));

        ops.replaceOne(query(where("firstName").is("danerys")), new SomeDomainType());

        verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class));
        verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
        verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));

        ops.execute().then().as(StepVerifier::create).verifyComplete();

        verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
        verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
        verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
    }

    @Test // GH-2821
    void bulkInsertEmitsEventsCorrectly() {

        ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);

        ops = new DefaultReactiveBulkOperations(template, "collection-1",
                new ReactiveBulkOperationContext(BulkMode.ORDERED,
                        Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
                        new UpdateMapper(converter), eventPublisher, null));

        ops.insert(new SomeDomainType());

        verify(eventPublisher, never()).publishEvent(any(BeforeConvertEvent.class));
        verify(eventPublisher, never()).publishEvent(any(BeforeSaveEvent.class));
        verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));

        ops.execute().then().as(StepVerifier::create).verifyComplete();

        verify(eventPublisher).publishEvent(any(BeforeConvertEvent.class));
        verify(eventPublisher).publishEvent(any(BeforeSaveEvent.class));
        verify(eventPublisher).publishEvent(any(AfterSaveEvent.class));
    }

    @Test // GH-2821
    void noAfterSaveEventOnFailure() {

        ApplicationEventPublisher eventPublisher = mock(ApplicationEventPublisher.class);

        when(collection.bulkWrite(anyList(), any(BulkWriteOptions.class))).thenThrow(new MongoWriteException(
                new WriteError(89, "NetworkTimeout", new BsonDocument("hi", new BsonString("there"))), null));

        ops = new DefaultReactiveBulkOperations(template, "collection-1",
                new ReactiveBulkOperationContext(BulkMode.ORDERED,
                        Optional.of(mappingContext.getPersistentEntity(Person.class)), new QueryMapper(converter),
                        new UpdateMapper(converter), eventPublisher, null));

        ops.insert(new SomeDomainType());

        // verifyError() (rather than a bare expectError()) actually subscribes and runs the expectation
        ops.execute().as(StepVerifier::create).verifyError();

        verify(eventPublisher, never()).publishEvent(any(AfterSaveEvent.class));
    }

    @Test // GH-2821
    void appliesArrayFilterWhenPresent() {

        ops.updateOne(new BasicQuery("{}"), new Update().filterArray(Criteria.where("element").gte(100))).execute()
                .subscribe();

        verify(collection).bulkWrite(captor.capture(), any());

        UpdateOneModel<Document> updateModel = (UpdateOneModel<Document>) captor.getValue().get(0);
        assertThat(updateModel.getOptions().getArrayFilters().get(0))
                .isEqualTo(new org.bson.Document("element", new Document("$gte", 100)));
    }

    static class BeforeConvertPersonCallback implements ReactiveBeforeConvertCallback<Person> {

        @Override
        public Mono<Person> onBeforeConvert(Person entity, String collection) {
            return Mono.just(new Person("before-convert"));
        }
    }

    static class BeforeSavePersonCallback implements ReactiveBeforeSaveCallback<Person> {

        @Override
        public Mono<Person> onBeforeSave(Person entity, Document document, String collection) {

            document.put("firstName", "before-save");
            return Mono.just(new Person("before-save"));
        }
    }

    static class AfterSavePersonCallback implements ReactiveAfterSaveCallback<Person> {

        @Override
        public Mono<Person> onAfterSave(Person entity, Document document, String collection) {

            document.put("firstName", "after-save");
            return Mono.just(new Person("after-save"));
        }
    }

    class SomeDomainType {

        @Id String id;
        DefaultBulkOperationsUnitTests.Gender gender;
        @Field("first_name") String firstName;
        @Field String lastName;
    }

    enum Gender {
        M, F
    }
}
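The collation cases above all reduce to the same pattern: a query-level `Collation` must survive the mapping onto the driver's write models. A minimal sketch of the input side (types as imported in the test):

// a filter-only query carrying a collation; during execution the bulk
// implementation is expected to copy it into the driver's per-model options
Query query = new BasicQuery("{}").collation(Collation.of("de"));
// driver-side equivalent the assertions compare against:
// com.mongodb.client.model.Collation.builder().locale("de").build()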
@@ -39,7 +39,6 @@ import org.springframework.data.mongodb.test.util.MongoTemplateExtension;
import org.springframework.data.mongodb.test.util.ReactiveMongoTestTemplate;
import org.springframework.data.mongodb.test.util.Template;

import com.mongodb.client.model.IndexOptions;
import com.mongodb.reactivestreams.client.MongoCollection;

/**
@@ -50,7 +49,7 @@ import com.mongodb.reactivestreams.client.MongoCollection;
@ExtendWith(MongoTemplateExtension.class)
public class DefaultReactiveIndexOperationsTests {

    @Template(initialEntitySet = DefaultIndexOperationsIntegrationTestsSample.class) //
    @Template(initialEntitySet = DefaultIndexOperationsIntegrationTestsSample.class)
    static ReactiveMongoTestTemplate template;

    String collectionName = template.getCollectionName(DefaultIndexOperationsIntegrationTestsSample.class);
@@ -165,50 +164,6 @@ public class DefaultReactiveIndexOperationsTests {
                .verifyComplete();
    }

    @Test // GH-4348
    void indexShouldNotBeHiddenByDefault() {

        IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC);

        indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete();

        indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) //
                .consumeNextWith(indexInfo -> {
                    assertThat(indexInfo.isHidden()).isFalse();
                }) //
                .verifyComplete();
    }

    @Test // GH-4348
    void shouldCreateHiddenIndex() {

        IndexDefinition index = new Index().named("my-hidden-index").on("a", Direction.ASC).hidden();

        indexOps.ensureIndex(index).then().as(StepVerifier::create).verifyComplete();

        indexOps.getIndexInfo().filter(this.indexByName("my-hidden-index")).as(StepVerifier::create) //
                .consumeNextWith(indexInfo -> {
                    assertThat(indexInfo.isHidden()).isTrue();
                }) //
                .verifyComplete();
    }

    @Test // GH-4348
    void alterIndexShouldAllowHiding() {

        template.execute(collectionName, collection -> {
            return collection.createIndex(new Document("a", 1), new IndexOptions().name("my-index"));
        }).then().as(StepVerifier::create).verifyComplete();

        indexOps.alterIndex("my-index", org.springframework.data.mongodb.core.index.IndexOptions.hidden())
                .as(StepVerifier::create).verifyComplete();
        indexOps.getIndexInfo().filter(this.indexByName("my-index")).as(StepVerifier::create) //
                .consumeNextWith(indexInfo -> {
                    assertThat(indexInfo.isHidden()).isTrue();
                }) //
                .verifyComplete();
    }

    Predicate<IndexInfo> indexByName(String name) {
        return indexInfo -> indexInfo.getName().equals(name);
    }

@@ -0,0 +1,181 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import lombok.EqualsAndHashCode;
import lombok.ToString;
import reactor.test.StepVerifier;

import java.util.List;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.test.util.Assertions;
import org.springframework.data.mongodb.test.util.Client;
import org.springframework.data.mongodb.test.util.MongoClientExtension;

import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;

/**
 * @author Christoph Strobl
 */
@ExtendWith(MongoClientExtension.class)
public class ReactiveDbRefTests {

    private static final String DB_NAME = "reactive-dbref-tests";
    private static @Client MongoClient client;

    ReactiveMongoTemplate template = new ReactiveMongoTemplate(MongoClients.create(), DB_NAME);
    MongoTemplate syncTemplate = new MongoTemplate(com.mongodb.client.MongoClients.create(), DB_NAME);

    @Test // GH-2496
    void loadDbRef() {

        Bar barSource = new Bar();
        barSource.id = "bar-1";
        barSource.value = "bar-1-value";
        syncTemplate.save(barSource);

        Foo fooSource = new Foo();
        fooSource.id = "foo-1";
        fooSource.name = "foo-1-name";
        fooSource.bar = barSource;
        syncTemplate.save(fooSource);

        template.query(Foo.class).matching(Criteria.where("id").is(fooSource.id)).first().as(StepVerifier::create)
                .consumeNextWith(foo -> {
                    Assertions.assertThat(foo.bar).isEqualTo(barSource);
                }).verifyComplete();
    }

    @Test // GH-2496
    void loadListOfDbRef() {

        Bar bar1Source = new Bar();
        bar1Source.id = "bar-1";
        bar1Source.value = "bar-1-value";
        syncTemplate.save(bar1Source);

        Bar bar2Source = new Bar();
        bar2Source.id = "bar-2";
        bar2Source.value = "bar-2-value";
        syncTemplate.save(bar2Source);

        Foo fooSource = new Foo();
        fooSource.id = "foo-1";
        fooSource.name = "foo-1-name";
        fooSource.bars = List.of(bar1Source, bar2Source);
        syncTemplate.save(fooSource);

        template.query(Foo.class).matching(Criteria.where("id").is(fooSource.id)).first().as(StepVerifier::create)
                .consumeNextWith(foo -> {
                    Assertions.assertThat(foo.bars).containsExactly(bar1Source, bar2Source);
                }).verifyComplete();
    }

    @Test // GH-2496
    void loadDbRefHoldingYetAnotherOne() {

        Roo rooSource = new Roo();
        rooSource.id = "roo-1";
        rooSource.name = "roo-the-kangaroo";
        syncTemplate.save(rooSource);

        Bar barSource = new Bar();
        barSource.id = "bar-1";
        barSource.value = "bar-1-value";
        barSource.roo = rooSource;
        syncTemplate.save(barSource);

        Foo fooSource = new Foo();
        fooSource.id = "foo-1";
        fooSource.name = "foo-1-name";
        fooSource.bar = barSource;
        syncTemplate.save(fooSource);

        template.query(Foo.class).matching(Criteria.where("id").is(fooSource.id)).first().as(StepVerifier::create)
                .consumeNextWith(foo -> {
                    Assertions.assertThat(foo.bar).isEqualTo(barSource);
                    Assertions.assertThat(foo.bar.roo).isEqualTo(rooSource);
                }).verifyComplete();
    }

    @Test // GH-2496
    void loadListOfDbRefHoldingYetAnotherOne() {

        Roo rooSource = new Roo();
        rooSource.id = "roo-1";
        rooSource.name = "roo-the-kangaroo";
        syncTemplate.save(rooSource);

        Bar bar1Source = new Bar();
        bar1Source.id = "bar-1";
        bar1Source.value = "bar-1-value";
        bar1Source.roo = rooSource;
        syncTemplate.save(bar1Source);

        Bar bar2Source = new Bar();
        bar2Source.id = "bar-2";
        bar2Source.value = "bar-2-value";
        syncTemplate.save(bar2Source);

        Foo fooSource = new Foo();
        fooSource.id = "foo-1";
        fooSource.name = "foo-1-name";
        fooSource.bars = List.of(bar1Source, bar2Source);
        syncTemplate.save(fooSource);

        template.query(Foo.class).matching(Criteria.where("id").is(fooSource.id)).first().as(StepVerifier::create)
                .consumeNextWith(foo -> {
                    Assertions.assertThat(foo.bars).containsExactly(bar1Source, bar2Source);
                }).verifyComplete();
    }

    @ToString
    static class Foo {
        String id;
        String name;

        @DBRef //
        Bar bar;

        @DBRef //
        List<Bar> bars;
    }

    @ToString
    @EqualsAndHashCode
    static class Bar {
        String id;
        String value;

        @DBRef Roo roo;
    }

    @ToString
    @EqualsAndHashCode
    static class Roo {
        String id;
        String name;
    }
}
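For reference, a sketch of what the fixtures above persist. The collection names derived from the class names are an assumption; the `$ref`/`$id` pair is the standard MongoDB DBRef format.

// db.bar : { "_id" : "bar-1", "value" : "bar-1-value" }
// db.foo : { "_id" : "foo-1", "name" : "foo-1-name",
//            "bar"  : { "$ref" : "bar", "$id" : "bar-1" },
//            "bars" : [ { "$ref" : "bar", "$id" : "bar-1" },
//                       { "$ref" : "bar", "$id" : "bar-2" } ] }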
@@ -0,0 +1,24 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

/**
 * @author Christoph Strobl
 */
public class ReactiveValueResolverUnitTests {

    // TODO: lots of tests
}
@@ -31,7 +31,7 @@ import org.springframework.data.domain.Sort.Direction;
 * @author Christoph Strobl
 * @author Stefan Tirea
 */
class IndexInfoUnitTests {
public class IndexInfoUnitTests {

    static final String ID_INDEX = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"_id_\", \"ns\" : \"db.collection\" }";
    static final String INDEX_WITH_PARTIAL_FILTER = "{ \"v\" : 2, \"key\" : { \"k3y\" : 1 }, \"name\" : \"partial-filter-index\", \"ns\" : \"db.collection\", \"partialFilterExpression\" : { \"quantity\" : { \"$gte\" : 10 } } }";
@@ -39,19 +39,9 @@ class IndexInfoUnitTests {
    static final String HASHED_INDEX = "{ \"v\" : 2, \"key\" : { \"score\" : \"hashed\" }, \"name\" : \"score_hashed\", \"ns\" : \"db.collection\" }";
    static final String WILDCARD_INDEX = "{ \"v\" : 2, \"key\" : { \"$**\" : 1 }, \"name\" : \"$**_1\", \"wildcardProjection\" : { \"fieldA\" : 0, \"fieldB.fieldC\" : 0 } }";
    static final String INDEX_WITH_COLLATION = "{ \"v\" : 2, \"key\" : { \"_id\" : 1 }, \"name\" : \"projectName\", \"collation\": { \"locale\": \"en_US\", \"strength\": 2 } }";
    static final String HIDDEN_INDEX = """
            {
                "v" : 2,
                "key" : {
                    "borough" : 1
                },
                "name" : "borough_1",
                "hidden" : true
            }
            """;

    @Test
    void isIndexForFieldsCorrectly() {
    public void isIndexForFieldsCorrectly() {

        IndexField fooField = IndexField.create("foo", Direction.ASC);
        IndexField barField = IndexField.create("bar", Direction.DESC);
@@ -61,29 +51,29 @@ class IndexInfoUnitTests {
    }

    @Test // DATAMONGO-2170
    void partialFilterExpressionShouldBeNullIfNotSetInSource() {
    public void partialFilterExpressionShouldBeNullIfNotSetInSource() {
        assertThat(getIndexInfo(ID_INDEX).getPartialFilterExpression()).isNull();
    }

    @Test // DATAMONGO-2170
    void partialFilterExpressionShouldMatchSource() {
    public void partialFilterExpressionShouldMatchSource() {

        assertThat(Document.parse(getIndexInfo(INDEX_WITH_PARTIAL_FILTER).getPartialFilterExpression()))
                .isEqualTo(Document.parse("{ \"quantity\" : { \"$gte\" : 10 } }"));
    }

    @Test // DATAMONGO-2081
    void expireAfterIsParsedCorrectly() {
    public void expireAfterIsParsedCorrectly() {
        assertThat(getIndexInfo(INDEX_WITH_EXPIRATION_TIME).getExpireAfter()).contains(Duration.ofHours(1));
    }

    @Test // DATAMONGO-2081
    void expireAfterIsEmptyIfNotSet() {
    public void expireAfterIsEmptyIfNotSet() {
        assertThat(getIndexInfo(ID_INDEX).getExpireAfter()).isEmpty();
    }

    @Test // DATAMONGO-1183
    void readsHashedIndexCorrectly() {
    public void readsHashedIndexCorrectly() {
        assertThat(getIndexInfo(HASHED_INDEX).getIndexFields()).containsExactly(IndexField.hashed("score"));
    }

@@ -93,29 +83,22 @@ class IndexInfoUnitTests {
    }

    @Test // GH-3225
    void identifiesWildcardIndexCorrectly() {
    public void identifiesWildcardIndexCorrectly() {
        assertThat(getIndexInfo(WILDCARD_INDEX).isWildcard()).isTrue();
    }

    @Test // GH-3225
    void readsWildcardIndexProjectionCorrectly() {
    public void readsWildcardIndexProjectionCorrectly() {
        assertThat(getIndexInfo(WILDCARD_INDEX).getWildcardProjection())
                .contains(new Document("fieldA", 0).append("fieldB.fieldC", 0));
    }

    @Test // GH-3002
    void collationParsedCorrectly() {
    public void collationParsedCorrectly() {
        assertThat(getIndexInfo(INDEX_WITH_COLLATION).getCollation())
                .contains(Document.parse("{ \"locale\": \"en_US\", \"strength\": 2 }"));
    }

    @Test // GH-4348
    void hiddenInfoSetCorrectly() {

        assertThat(getIndexInfo(ID_INDEX).isHidden()).isFalse();
        assertThat(getIndexInfo(HIDDEN_INDEX).isHidden()).isTrue();
    }

    private static IndexInfo getIndexInfo(String documentJson) {
        return IndexInfo.indexInfoOf(Document.parse(documentJson));
    }

@@ -61,7 +61,6 @@ import org.springframework.data.mongodb.core.query.BasicQuery;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Update;
@@ -459,7 +458,7 @@
    void updateExecutionCallsUpdateAllCorrectly() {

        when(terminatingUpdate.all()).thenReturn(updateResultMock);

        createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) //
                .execute(new Object[] { "dalinar", 100 });

@@ -470,29 +469,6 @@
        assertThat(update.getValue().getUpdateObject()).isEqualTo(Document.parse("{ '$inc' : { 'visits' : 100 } }"));
    }

    @Test // GH-3230
    void findShouldApplyHint() {

        createQueryForMethod("findWithHintByFirstname", String.class).execute(new Object[] { "Jasna" });

        ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
        verify(withQueryMock).matching(captor.capture());
        assertThat(captor.getValue().getHint()).isEqualTo("idx-fn");
    }

    @Test // GH-3230
    void updateShouldApplyHint() {

        when(terminatingUpdate.all()).thenReturn(updateResultMock);

        createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) //
                .execute(new Object[] { "dalinar", 100 });

        ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
        verify(executableUpdate).matching(captor.capture());
        assertThat(captor.getValue().getHint()).isEqualTo("idx-ln");
    }

    private MongoQueryFake createQueryForMethod(String methodName, Class<?>... paramTypes) {
        return createQueryForMethod(Repo.class, methodName, paramTypes);
    }
@@ -608,12 +584,8 @@
    @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }")
    List<Person> findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation);

    @Hint("idx-ln")
    @Update("{ '$inc' : { 'visits' : ?1 } }")
    void findAndIncreaseVisitsByLastname(String lastname, int value);

    @Hint("idx-fn")
    void findWithHintByFirstname(String firstname);
}

// DATAMONGO-1872

@@ -18,15 +18,6 @@ package org.springframework.data.mongodb.repository.query;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;

import com.mongodb.MongoClientSettings;
import com.mongodb.client.result.UpdateResult;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate;
import org.springframework.data.mongodb.core.ReactiveUpdateOperation.ReactiveUpdate;
import org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.mongodb.repository.Update;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@@ -80,9 +71,6 @@ class AbstractReactiveMongoQueryUnitTests {

    @Mock ReactiveFind<?> executableFind;
    @Mock FindWithQuery<?> withQueryMock;
    @Mock ReactiveUpdate executableUpdate;
    @Mock UpdateWithQuery updateWithQuery;
    @Mock TerminatingUpdate terminatingUpdate;

    @BeforeEach
    void setUp() {
@@ -103,11 +91,6 @@ class AbstractReactiveMongoQueryUnitTests {
        doReturn(Flux.empty()).when(withQueryMock).all();
        doReturn(Mono.empty()).when(withQueryMock).first();
        doReturn(Mono.empty()).when(withQueryMock).one();

        doReturn(executableUpdate).when(mongoOperationsMock).update(any());
        doReturn(executableUpdate).when(executableUpdate).inCollection(anyString());
        doReturn(updateWithQuery).when(executableUpdate).matching(any(Query.class));
        doReturn(terminatingUpdate).when(updateWithQuery).apply(any(UpdateDefinition.class));
    }

    @Test // DATAMONGO-1854
@@ -240,29 +223,6 @@ class AbstractReactiveMongoQueryUnitTests {
                .contains(Collation.of("en_US").toDocument());
    }

    @Test // GH-3230
    void findShouldApplyHint() {

        createQueryForMethod("findWithHintByFirstname", String.class).executeBlocking(new Object[] { "Jasna" });

        ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
        verify(withQueryMock).matching(captor.capture());
        assertThat(captor.getValue().getHint()).isEqualTo("idx-fn");
    }

    @Test // GH-3230
    void updateShouldApplyHint() {

        when(terminatingUpdate.all()).thenReturn(Mono.just(mock(UpdateResult.class)));

        createQueryForMethod("findAndIncreaseVisitsByLastname", String.class, int.class) //
                .executeBlocking(new Object[] { "dalinar", 100 });

        ArgumentCaptor<Query> captor = ArgumentCaptor.forClass(Query.class);
        verify(executableUpdate).matching(captor.capture());
        assertThat(captor.getValue().getHint()).isEqualTo("idx-ln");
    }

    private ReactiveMongoQueryFake createQueryForMethod(String methodName, Class<?>... paramTypes) {
        return createQueryForMethod(Repo.class, methodName, paramTypes);
    }
@@ -331,11 +291,6 @@ class AbstractReactiveMongoQueryUnitTests {
        isLimitingQuery = limitingQuery;
        return this;
    }

    @Override
    protected Mono<CodecRegistry> getCodecRegistry() {
        return Mono.just(MongoClientSettings.getDefaultCodecRegistry());
    }
}

private interface Repo extends ReactiveMongoRepository<Person, Long> {
@@ -360,12 +315,5 @@ class AbstractReactiveMongoQueryUnitTests {

    @org.springframework.data.mongodb.repository.Query(collation = "{ 'locale' : 'en_US' }")
    List<Person> findWithWithCollationParameterAndAnnotationByFirstName(String firstname, Collation collation);

    @Hint("idx-ln")
    @Update("{ '$inc' : { 'visits' : ?1 } }")
    void findAndIncreaseVisitsByLastname(String lastname, int value);

    @Hint("idx-fn")
    void findWithHintByFirstname(String firstname);
}
}

@@ -21,7 +21,6 @@ import static org.mockito.Mockito.*;

import lombok.Value;
import org.reactivestreams.Publisher;
import org.springframework.data.mongodb.repository.Hint;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@@ -79,7 +78,6 @@ public class ReactiveStringBasedAggregationUnitTests {

    private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }";
    private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }";
    private static final String RAW_OUT = "{ '$out' : 'authors' }";
    private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }";
    private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }";

@@ -175,13 +173,6 @@ public class ReactiveStringBasedAggregationUnitTests {
        verify(operations).execute(any());
    }

    @Test // GH-3230
    void aggregatePicksUpHintFromAnnotation() {

        AggregationInvocation invocation = executeAggregation("withHint");
        assertThat(hintOf(invocation)).isEqualTo("idx");
    }

    private AggregationInvocation executeAggregation(String name, Object... args) {

        Class<?>[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(size -> new Class<?>[size]);
@@ -197,22 +188,6 @@ public class ReactiveStringBasedAggregationUnitTests {
        return new AggregationInvocation(aggregationCaptor.getValue(), targetTypeCaptor.getValue(), result);
    }

    @Test // GH-4088
    void aggregateWithVoidReturnTypeSkipsResultOnOutStage() {

        AggregationInvocation invocation = executeAggregation("outSkipResult");

        assertThat(skipResultsOf(invocation)).isTrue();
    }

    @Test // GH-4088
    void aggregateWithOutStageDoesNotSkipResults() {

        AggregationInvocation invocation = executeAggregation("outDoNotSkipResult");

        assertThat(skipResultsOf(invocation)).isFalse();
    }

    private ReactiveStringBasedAggregation createAggregationForMethod(String name, Class<?>... parameters) {

        Method method = ClassUtils.getMethod(SampleRepository.class, name, parameters);
@@ -241,17 +216,6 @@ public class ReactiveStringBasedAggregationUnitTests {
                : null;
    }

    @Nullable
    private Object hintOf(AggregationInvocation invocation) {
        return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null)
                : null;
    }

    private Boolean skipResultsOf(AggregationInvocation invocation) {
        return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults()
                : false;
    }

    private Class<?> targetTypeOf(AggregationInvocation invocation) {
        return invocation.getTargetType();
    }
@@ -279,16 +243,6 @@ public class ReactiveStringBasedAggregationUnitTests {

    @Aggregation(pipeline = RAW_GROUP_BY_LASTNAME_STRING, collation = "de_AT")
    Mono<PersonAggregate> aggregateWithCollation(Collation collation);

    @Hint("idx")
    @Aggregation(RAW_GROUP_BY_LASTNAME_STRING)
    String withHint();

    @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT })
    Flux<Person> outDoNotSkipResult();

    @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT })
    Mono<Void> outSkipResult();
}

static class PersonAggregate {

@@ -58,7 +58,6 @@ import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.repository.Aggregation;
import org.springframework.data.mongodb.repository.Hint;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.mongodb.repository.Person;
import org.springframework.data.projection.ProjectionFactory;
@@ -92,7 +91,6 @@ public class StringBasedAggregationUnitTests {

    private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }";
    private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }";
    private static final String RAW_OUT = "{ '$out' : 'authors' }";
    private static final String GROUP_BY_LASTNAME_STRING_WITH_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', names : { '$addToSet' : '$?0' } } }";
    private static final String GROUP_BY_LASTNAME_STRING_WITH_SPEL_PARAMETER_PLACEHOLDER = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$?#{[0]}' } } }";

@@ -262,29 +260,6 @@ public class StringBasedAggregationUnitTests {
                .withMessageContaining("Page");
    }

    @Test // GH-3230
    void aggregatePicksUpHintFromAnnotation() {

        AggregationInvocation invocation = executeAggregation("withHint");
        assertThat(hintOf(invocation)).isEqualTo("idx");
    }

    @Test // GH-4088
    void aggregateWithVoidReturnTypeSkipsResultOnOutStage() {

        AggregationInvocation invocation = executeAggregation("outSkipResult");

        assertThat(skipResultsOf(invocation)).isTrue();
    }

    @Test // GH-4088
    void aggregateWithOutStageDoesNotSkipResults() {

        AggregationInvocation invocation = executeAggregation("outDoNotSkipResult");

        assertThat(skipResultsOf(invocation)).isFalse();
    }

    private AggregationInvocation executeAggregation(String name, Object... args) {

        Class<?>[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new);
@@ -327,17 +302,6 @@ public class StringBasedAggregationUnitTests {
                : null;
    }

    @Nullable
    private Object hintOf(AggregationInvocation invocation) {
        return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().getHintObject().orElse(null)
                : null;
    }

    private Boolean skipResultsOf(AggregationInvocation invocation) {
        return invocation.aggregation.getOptions() != null ? invocation.aggregation.getOptions().isSkipResults()
                : false;
    }

    private Class<?> targetTypeOf(AggregationInvocation invocation) {
        return invocation.getTargetType();
    }
@@ -386,16 +350,6 @@ public class StringBasedAggregationUnitTests {

    @Aggregation(RAW_GROUP_BY_LASTNAME_STRING)
    String simpleReturnType();

    @Hint("idx")
    @Aggregation(RAW_GROUP_BY_LASTNAME_STRING)
    String withHint();

    @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT })
    List<Person> outDoNotSkipResult();

    @Aggregation(pipeline = { RAW_GROUP_BY_LASTNAME_STRING, RAW_OUT })
    void outSkipResult();
}

private interface UnsupportedRepository extends Repository<Person, Long> {

@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.repository.support;

import static org.assertj.core.api.Assertions.*;

import org.junit.jupiter.api.Disabled;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@@ -245,11 +247,15 @@ public class ReactiveQuerydslMongoPredicateExecutorTests {
                .join(person.coworker, QUser.user).on(QUser.user.username.eq("user-2")).fetch();

        result.as(StepVerifier::create) //
                .expectError(UnsupportedOperationException.class) //
                .verify();
                .consumeNextWith(it -> {
                    assertThat(it.getCoworker()).isNotNull();
                    assertThat(it.getCoworker().getUsername()).isEqualTo(user2.getUsername());
                })
                .verifyComplete();
    }

    @Test // DATAMONGO-2182
    @Disabled("This should actually return Mono.empty() but seems to read all entries somehow - need to check!")
    public void queryShouldTerminateWithUnsupportedOperationOnJoinWithNoResults() {

        User user1 = new User();
@@ -283,8 +289,7 @@ public class ReactiveQuerydslMongoPredicateExecutorTests {
                .join(person.coworker, QUser.user).on(QUser.user.username.eq("does-not-exist")).fetch();

        result.as(StepVerifier::create) //
                .expectError(UnsupportedOperationException.class) //
                .verify();
                .verifyComplete(); // should not find anything, should it?
    }

    @Test // DATAMONGO-2182

@@ -27,8 +27,6 @@ import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.bson.BsonBinary;
|
||||
import org.bson.BsonBinarySubType;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.DecoderContext;
|
||||
import org.junit.jupiter.api.Test;
|
||||
@@ -211,46 +209,6 @@ class ParameterBindingJsonReaderUnitTests {
|
||||
assertThat(target).isEqualTo(Document.parse("{ 'end_date' : { $gte : { $date : " + time + " } } } "));
|
||||
}
|
||||
|
||||
@Test // GH-3750
|
||||
public void shouldParseISODate() {
|
||||
|
||||
String json = "{ 'value' : ISODate(\"1970-01-01T00:00:00Z\") }";
|
||||
Date value = parse(json).get("value", Date.class);
|
||||
assertThat(value.getTime()).isZero();
|
||||
}
|
||||
|
||||
@Test // GH-3750
|
||||
public void shouldParseISODateWith24HourTimeSpecification() {
|
||||
|
||||
String json = "{ 'value' : ISODate(\"2013-10-04T12:07:30.443Z\") }";
|
||||
Date value = parse(json).get("value", Date.class);
|
||||
assertThat(value.getTime()).isEqualTo(1380888450443L);
|
||||
}
|
||||
|
||||
@Test // GH-3750
|
||||
public void shouldParse$date() {
|
||||
|
||||
String json = "{ 'value' : { \"$date\" : \"2015-04-16T14:55:57.626Z\" } }";
|
||||
Date value = parse(json).get("value", Date.class);
|
||||
assertThat(value.getTime()).isEqualTo(1429196157626L);
|
||||
}
|
||||
|
||||
@Test // GH-3750
|
||||
public void shouldParse$dateWithTimeOffset() {
|
||||
|
||||
String json = "{ 'value' :{ \"$date\" : \"2015-04-16T16:55:57.626+02:00\" } }";
|
||||
Date value = parse(json).get("value", Date.class);
|
||||
assertThat(value.getTime()).isEqualTo(1429196157626L);
|
||||
}
|
||||
|
||||
@Test // GH-4282
|
||||
public void shouldReturnNullAsSuch() {
|
||||
|
||||
String json = "{ 'value' : ObjectId(?0) }";
|
||||
assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> parse(json, new Object[] { null }))
|
||||
.withMessageContaining("hexString");
|
||||
}
|
||||
|
||||
@Test // DATAMONGO-2418
|
||||
void shouldNotAccessSpElEvaluationContextWhenNoSpElPresentInBindableTarget() {
|
||||
|
||||
@@ -526,6 +484,7 @@ class ParameterBindingJsonReaderUnitTests {
 		assertThat(target).isEqualTo(new Document("parent", null));
 	}
 
+
 	@Test // GH-4089
 	void retainsSpelArgumentTypeViaArgumentIndex() {
@@ -588,23 +547,6 @@ class ParameterBindingJsonReaderUnitTests {
 		assertThat(target.get("arg0")).isEqualTo(source);
 	}
 
-	@Test // GH-3750
-	void shouldParseUUIDasStandardRepresentation() {
-
-		String json = "{ 'value' : UUID(\"b5f21e0c-2a0d-42d6-ad03-d827008d8ab6\") }";
-
-		BsonBinary value = parse(json).get("value", BsonBinary.class);
-		assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue());
-	}
-
-	@Test // GH-3750
-	public void shouldParse$uuidAsStandardRepresentation() {
-
-		String json = "{ 'value' : { '$uuid' : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\" } }";
-		BsonBinary value = parse(json).get("value", BsonBinary.class);
-		assertThat(value.getType()).isEqualTo(BsonBinarySubType.UUID_STANDARD.getValue());
-	}
-
 	private static Document parse(String json, Object... args) {
 
 		ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args);
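The hunk ends inside the parse(String, Object...) helper. Judging from the DecoderContext import above, the helper presumably finishes by decoding the reader through Spring Data's binding-aware codec; a sketch of the assumed completion (the decode line is an inference, not shown in this diff):

private static Document parse(String json, Object... args) {

	ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args);
	// Assumed completion: decode the reader into a Document via the binding-aware codec.
	return new ParameterBindingDocumentCodec().decode(reader, DecoderContext.builder().build());
}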
@@ -37,12 +37,6 @@ public interface PersonRepository extends CrudRepository<Person, String> {
 
 	@Aggregation("{ '$project': { '_id' : '$lastname' } }")
 	List<String> findAllLastnames(); <9>
-
-	@Aggregation(pipeline = {
-			"{ $group : { _id : '$author', books: { $push: '$title' } } }",
-			"{ $out : 'authors' }"
-	})
-	void groupAndOutSkippingOutput(); <10>
 }
 ----
 [source,java]
@@ -81,7 +75,6 @@ Therefore, the `Sort` properties are mapped against the method's return type `Per
 To gain more control, you might consider `AggregationResults` as method return type as shown in <7>.
 <8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`.
 <9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s.
-<10> Skips the output of the `$out` stage when the return type is `void`.
 ====
 
 In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk.
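Such options can be declared on the repository method via the @Meta annotation; a brief sketch reusing the documented PersonRepository example (the timeout and comment values are illustrative, and disk-usage permission is typically configured through AggregationOptions rather than shown here):

import java.util.List;

import org.springframework.data.mongodb.repository.Aggregation;
import org.springframework.data.mongodb.repository.Meta;
import org.springframework.data.repository.CrudRepository;

public interface PersonRepository extends CrudRepository<Person, String> {

	// Abort the aggregation after 500 ms and tag it with a comment that shows up in the server logs.
	@Meta(maxExecutionTimeMs = 500, comment = "lastname projection")
	@Aggregation("{ '$project': { '_id' : '$lastname' } }")
	List<String> findAllLastnames();
}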
@@ -297,26 +297,6 @@ lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range`
 
 NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters.
 
-[[mongodb.repositories.queries.hint]]
-=== Repository Index Hints
-
-The `@Hint` annotation allows overriding MongoDB's default index selection and forces the database to use the specified index instead.
-
-.Example of index hints
-====
-[source,java]
-----
-@Hint("lastname-idx") <1>
-List<Person> findByLastname(String lastname);
-
-@Query(value = "{ 'firstname' : ?0 }", hint = "firstname-idx") <2>
-List<Person> findByFirstname(String firstname);
-----
-
-<1> Use the index with name `lastname-idx`.
-<2> The `@Query` annotation defines the `hint` alias, which is equivalent to adding the `@Hint` annotation.
-====
-
 [[mongodb.repositories.queries.update]]
 === Repository Update Methods
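A hint is only useful if the named index actually exists. One way to guarantee that, sketched with MongoTemplate's IndexOperations (the class wrapper, wiring, and names are illustrative, matching the `lastname-idx` from the documentation example above):

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;

class IndexSetup {

	// Creates the index referenced by @Hint("lastname-idx") if it does not exist yet.
	void ensureLastnameIndex(MongoTemplate template) {
		template.indexOps(Person.class)
				.ensureIndex(new Index().on("lastname", Direction.ASC).named("lastname-idx"));
	}
}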
@@ -1,4 +1,4 @@
-Spring Data MongoDB 4.1 RC1 (2023.0.0)
+Spring Data MongoDB 4.1 M3 (2023.0.0)
 Copyright (c) [2010-2019] Pivotal Software, Inc.
 
 This product is licensed to you under the Apache License, Version 2.0 (the "License").
@@ -43,6 +43,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file.
 
 
 
 
 
-