Compare commits

3.0.0.RC2 ... 2.2.4.RELEASE

58 commits
| SHA1 |
|---|
| 6d2f7b0c9e |
| 84128cab28 |
| 8f2d2784f8 |
| 37ee677a67 |
| 1729744a9e |
| b59903c890 |
| 5063e68562 |
| 3cbb572c7c |
| bb4f11a239 |
| 8a1750691b |
| 5cffaf9d73 |
| a9e50b28df |
| 24512639fb |
| 1355d043b6 |
| c7600ef9ed |
| a4cb0d6432 |
| 437368bedb |
| 4057b193d6 |
| 89b1b5c7f6 |
| 60d3438277 |
| a578a10b5b |
| 5cf24af00b |
| 6c0e455146 |
| 932a946868 |
| 99a1cfbff9 |
| d7bbdde1e7 |
| 7dba98dce8 |
| c1ae30bd82 |
| 6aa5aea424 |
| bc1b00813c |
| d1ad3ab301 |
| 923134bbdc |
| e211f69df5 |
| fc35d706a0 |
| 82894e6aff |
| 7356f157bb |
| 783fc6268a |
| 360b17f299 |
| 2cfcdaff7c |
| 9d9cf46e47 |
| 98661cf9a2 |
| cc50cd5e3a |
| d8399d2d23 |
| f2134fb2f8 |
| ec3ccc004e |
| 6cb246c18a |
| e73cea0ecf |
| c69e185a2a |
| 5789f59222 |
| 5178eeb340 |
| bc5e7fa4a2 |
| c28ace6d40 |
| de4fae37e1 |
| 2f1aff3ec3 |
| 6970f934bd |
| 6b5168e102 |
| 4420edb4dc |
| c2fae95fee |
.travis.yml (28 changes)

@@ -1,5 +1,8 @@
language: java

jdk:
- oraclejdk8

before_install:
- mkdir -p downloads
- mkdir -p var/db var/log

@@ -11,22 +14,20 @@ before_install:
downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});"
sleep 15

jdk:
- openjdk13
- openjdk-ea

matrix:
allow_failures:
- jdk: openjdk-ea

env:
matrix:
- MONGO_VERSION=4.2.0
- MONGO_VERSION=4.0.14
- MONGO_VERSION=3.6.16
- MONGO_VERSION=4.1.10
- MONGO_VERSION=4.0.4
- MONGO_VERSION=3.6.12
- MONGO_VERSION=3.4.20
global:
- PROFILE=ci

addons:
apt:
packages:
- oracle-java8-installer

sudo: false

cache:

@@ -34,7 +35,4 @@ cache:
- $HOME/.m2
- downloads

install: true

script:
- "./mvnw clean dependency:list test -Pjava11 -Dsort -U"
script: "mvn clean dependency:list test -P${PROFILE} -Dsort"
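Both CI setups above start `mongod` as a single-node replica set (`rs0`) and run `rs.initiate(...)` before the build, because MongoDB multi-document transactions only work against a replica set. A minimal sketch of how a test client could connect to that locally initiated replica set (the connection string and database name are assumptions, not taken from the build files):

```java
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;

class ReplicaSetConnectionSketch {

    public static void main(String[] args) {
        // Connect to the single-node replica set the CI scripts initiate on 127.0.0.1:27017.
        try (MongoClient client = MongoClients.create("mongodb://127.0.0.1:27017/?replicaSet=rs0")) {
            MongoDatabase database = client.getDatabase("test"); // database name is an assumption
            // Simple liveness check against the replica set member.
            System.out.println(database.runCommand(new Document("ping", 1)).toJson());
        }
    }
}
```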
Jenkinsfile (vendored, 113 changes)
@@ -3,7 +3,7 @@ pipeline {
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
|
||||
upstream(upstreamProjects: "spring-data-commons/2.2.x", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
@@ -30,51 +30,51 @@ pipeline {
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.1') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.1/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.2') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.2/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.2/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 14 + MongoDB 4.2') {
|
||||
when {
|
||||
changeset "ci/openjdk14-mongodb-4.2/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2", "ci/openjdk8-mongodb-4.2/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk8)") {
|
||||
stage("test: baseline") {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.2:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
@@ -83,23 +83,23 @@ pipeline {
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
parallel {
|
||||
stage("test: mongodb 4.0 (jdk8)") {
|
||||
stage("test: mongodb 4.0") {
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
|
||||
@@ -111,18 +111,17 @@ pipeline {
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 4.2 (jdk8)") {
|
||||
stage("test: mongodb 4.1") {
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
@@ -131,31 +130,11 @@ pipeline {
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk14)") {
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -164,7 +143,7 @@ pipeline {
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
branch '2.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -196,7 +175,7 @@ pipeline {
|
||||
|
||||
stage('Publish documentation') {
|
||||
when {
|
||||
branch 'master'
|
||||
branch '2.2.x'
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
|
||||
README.adoc (141 changes)

@@ -50,11 +50,11 @@ public class MyService {

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public MongoClient mongoClient() {
    return MongoClients.create();
    return new MongoClient();
  }

  @Override

@@ -94,143 +94,6 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
</repository>
----

== Upgrading from 2.x

The 4.0 MongoDB Java Driver no longer supports certain features that had already been deprecated in one of the previous minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNACKNOWLEDGED, ACKNOWLEDGED, JOURNALED, MAJORITY
|===

.Removed XML Namespace Elements and Attributes:
|===
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.
|===

=== Java Configuration

.Java API changes
|===
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===

.Removed Java API:
|===
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===
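To illustrate the `SimpleMongoDbFactory` to `SimpleMongoClientDbFactory` row from the table above, a minimal migration sketch; the connection string, database name, and the use of `MongoTemplate` are illustrative assumptions, and `SimpleMongoClientDbFactory` is the 2.1+ replacement the table names:

```java
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;

class FactoryMigrationSketch {

    MongoTemplate mongoTemplate() {
        // Legacy 2.x style (removed): new SimpleMongoDbFactory(new MongoClient(), "database")
        // Replacement built on com.mongodb.client.MongoClient:
        MongoClient client = MongoClients.create("mongodb://localhost:27017");
        return new MongoTemplate(new SimpleMongoClientDbFactory(client, "database"));
    }
}
```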
=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

    @Override
    public void configureClientSettings(MongoClientSettings.Builder builder) {
        builder.uuidRepresentation(UuidRepresentation.STANDARD);
    }

    // ...
}
----
====

== Getting Help

Having trouble with Spring Data? We’d love to help!
@@ -1,15 +0,0 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
  echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
  apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
  apt-get clean; \
  rm -rf /var/lib/apt/lists/*;

@@ -1,15 +0,0 @@
FROM adoptopenjdk/openjdk14:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
  echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
  apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
  apt-get clean; \
  rm -rf /var/lib/apt/lists/*;

@@ -1,15 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN set -eux; \
  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
  echo ${TZ} > /etc/timezone;
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN apt-get update ; \
  apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
  apt-get clean; \
  rm -rf /var/lib/apt/lists/*;
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*

ci/openjdk8-mongodb-4.1/Dockerfile (new file, 14 lines)

@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*

@@ -1,15 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN set -eux; \
  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
  echo ${TZ} > /etc/timezone;
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b

RUN apt-get update ; \
  apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
  apt-get clean; \
  rm -rf /var/lib/apt/lists/*;
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0

RUN apt-get clean \
  && rm -rf /var/lib/apt/lists/*
pom.xml (26 changes)

@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.RC2</version>
<version>2.2.4.RELEASE</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>

@@ -15,7 +15,7 @@
<parent>
  <groupId>org.springframework.data.build</groupId>
  <artifactId>spring-data-parent</artifactId>
  <version>2.3.0.RC2</version>
  <version>2.2.4.RELEASE</version>
</parent>

<modules>

@@ -26,9 +26,9 @@
<properties>
  <project.type>multi</project.type>
  <dist.id>spring-data-mongodb</dist.id>
  <springdata.commons>2.3.0.RC2</springdata.commons>
  <mongo>4.0.2</mongo>
  <mongo.reactivestreams>${mongo}</mongo.reactivestreams>
  <springdata.commons>2.2.4.RELEASE</springdata.commons>
  <mongo>3.11.2</mongo>
  <mongo.reactivestreams>1.12.0</mongo.reactivestreams>
  <jmh.version>1.19</jmh.version>
</properties>

@@ -127,25 +127,15 @@
<!-- MongoDB -->
<dependency>
  <groupId>org.mongodb</groupId>
  <artifactId>mongodb-driver-core</artifactId>
  <artifactId>mongo-java-driver</artifactId>
  <version>${mongo}</version>
</dependency>
</dependencies>

<repositories>
  <repository>
    <id>spring-libs-milestone</id>
    <url>https://repo.spring.io/libs-milestone</url>
  </repository>
  <repository>
    <id>sonatype-libs-snapshot</id>
    <url>https://oss.sonatype.org/content/repositories/snapshots</url>
    <releases>
      <enabled>false</enabled>
    </releases>
    <snapshots>
      <enabled>true</enabled>
    </snapshots>
    <id>spring-libs-release</id>
    <url>https://repo.spring.io/libs-release</url>
  </repository>
</repositories>
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.0.0.RC2</version>
|
||||
<version>2.2.4.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.0.0.RC2</version>
|
||||
<version>2.2.4.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.0.0.RC2</version>
|
||||
<version>2.2.4.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -65,12 +65,6 @@
|
||||
<artifactId>querydsl-mongodb</artifactId>
|
||||
<version>${querydsl}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -88,14 +82,6 @@
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-sync</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
@@ -103,6 +89,23 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>bson</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
@@ -250,13 +253,6 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.junit-pioneer</groupId>
|
||||
<artifactId>junit-pioneer</artifactId>
|
||||
<version>0.5.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
@@ -344,6 +340,12 @@
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
@@ -19,9 +19,9 @@ import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.MongoBulkWriteException;
import com.mongodb.bulk.BulkWriteError;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;

/**
 * Is thrown when errors occur during bulk operations.

@@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException {
	private final BulkWriteResult result;

	/**
	 * Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}.
	 * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
	 *
	 * @param message must not be {@literal null}.
	 * @param source must not be {@literal null}.
	 */
	public BulkOperationException(String message, MongoBulkWriteException source) {
	public BulkOperationException(String message, BulkWriteException source) {

		super(message, source);
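The constructor change above means the exception now carries the driver's `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` types. A sketch of inspecting the per-write errors; the `Person` type and the injected `MongoOperations` are assumptions, and whether a failed bulk write actually surfaces as `BulkOperationException` can depend on the configured exception translation:

```java
import java.util.List;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;

class BulkErrorSketch {

    void insertAll(MongoOperations operations, List<Person> people) {
        BulkOperations bulk = operations.bulkOps(BulkMode.UNORDERED, Person.class);
        bulk.insert(people);
        try {
            bulk.execute();
        } catch (BulkOperationException e) {
            // Each entry describes a single failed write (index, error code, message).
            e.getErrors().forEach(error -> System.err.println(error.getMessage()));
        }
    }

    static class Person {} // illustrative placeholder type
}
```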
@@ -1,112 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} from the underlying factory.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDatabaseFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
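The removed `MongoDatabaseFactory` interface listed above is the 3.0 entry point for obtaining `com.mongodb.client.MongoDatabase` instances. A usage sketch against the 3.0 contract; the `SimpleMongoClientDatabaseFactory` implementation, connection string, and database names are assumptions:

```java
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

class DatabaseFactorySketch {

    void showDatabases() {
        MongoDatabaseFactory factory =
                new SimpleMongoClientDatabaseFactory(MongoClients.create("mongodb://localhost:27017"), "test");

        MongoDatabase defaultDb = factory.getMongoDatabase();    // default database ("test")
        MongoDatabase other = factory.getMongoDatabase("other"); // explicitly named database
        System.out.println(defaultDb.getName() + ", " + other.getName());
    }
}
```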
@@ -27,7 +27,7 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
@@ -41,94 +41,93 @@ import com.mongodb.client.MongoDatabase;
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory,
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
MongoDatabaseFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase();
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active
|
||||
* transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory
|
||||
* resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active
|
||||
* transaction}.
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) {
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
@@ -139,8 +138,7 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDatabaseFactory dbFactory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
@@ -171,7 +169,7 @@ public class MongoDatabaseUtils {
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) {
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
@@ -186,7 +184,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) {
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
|
||||
@@ -15,8 +15,14 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -25,33 +31,92 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
MongoDatabase getDb() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
* Creates a {@link DB} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
MongoDatabase getDb(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the legacy database entry point. Please consider {@link #getDb()} instead.
|
||||
*
|
||||
* @return
|
||||
* @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
|
||||
* with the legacy MongoDB driver.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
DB getLegacyDb();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getDb().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDbFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDbFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,15 +36,15 @@ import com.mongodb.client.ClientSession;
|
||||
class MongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private MongoDatabaseFactory dbFactory;
|
||||
private MongoDbFactory dbFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
*/
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) {
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -75,9 +75,9 @@ class MongoResourceHolder extends ResourceHolderSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link MongoDatabaseFactory}.
|
||||
* @return the associated {@link MongoDbFactory}.
|
||||
*/
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
|
||||
@@ -36,18 +36,17 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
|
||||
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
@@ -59,46 +58,46 @@ import com.mongodb.client.ClientSession;
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
|
||||
*/
|
||||
public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
implements ResourceTransactionManager, InitializingBean {
|
||||
|
||||
private @Nullable MongoDatabaseFactory dbFactory;
|
||||
private @Nullable MongoDbFactory dbFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
|
||||
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
|
||||
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <strong>Note:</strong>The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
|
||||
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
|
||||
* {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDbFactory(MongoDatabaseFactory)
|
||||
* @see #setDbFactory(MongoDbFactory)
|
||||
* @see #setTransactionSynchronization(int)
|
||||
*/
|
||||
public MongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory) {
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory) {
|
||||
this(dbFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}
|
||||
* applying the given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
|
||||
* given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
|
||||
@@ -296,11 +295,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoDatabaseFactory} that this instance should manage transactions for.
|
||||
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDbFactory(MongoDatabaseFactory dbFactory) {
|
||||
public void setDbFactory(MongoDbFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -316,12 +315,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link MongoDatabaseFactory} that this instance manages transactions for.
|
||||
* Get the {@link MongoDbFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
@@ -330,7 +329,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory getResourceFactory() {
|
||||
public MongoDbFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
@@ -345,7 +344,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {
|
||||
|
||||
MongoDatabaseFactory dbFactory = getResourceFactory();
|
||||
MongoDbFactory dbFactory = getResourceFactory();
|
||||
|
||||
MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
|
||||
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
|
||||
@@ -356,7 +355,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
|
||||
*/
|
||||
private MongoDatabaseFactory getRequiredDbFactory() {
|
||||
private MongoDbFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
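The `MongoTransactionManager` diff above keeps the same bean-style setup in both versions; only the factory type changes (`MongoDatabaseFactory` in 3.0 versus `MongoDbFactory` in 2.2.x). A configuration sketch using the constructor shown in the diff, with the 3.0 factory type; the configuration class and service names are illustrative assumptions:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.transaction.annotation.Transactional;

@Configuration
class TransactionConfig {

    @Bean
    MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {
        // Same pattern as the constructor in the diff: sessions are obtained from the factory.
        return new MongoTransactionManager(dbFactory);
    }
}

class OrderService {

    @Transactional
    void placeOrder() {
        // MongoTemplate operations invoked here participate in one ClientSession-backed transaction.
    }
}
```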
@@ -31,7 +31,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Mathieu Ouellet
 * @since 2.0
 */
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {

@@ -42,16 +41,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
	 * @return
	 * @throws DataAccessException
	 */
	Mono<MongoDatabase> getMongoDatabase() throws DataAccessException;
	MongoDatabase getMongoDatabase() throws DataAccessException;

	/**
	 * Obtain a {@link MongoDatabase} instance to access the database with the given name.
	 * Creates a {@link MongoDatabase} instance to access the database with the given name.
	 *
	 * @param dbName must not be {@literal null} or empty.
	 * @return
	 * @throws DataAccessException
	 */
	Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException;
	MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;

	/**
	 * Exposes a shared {@link MongoExceptionTranslator}.

@@ -65,7 +64,10 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
	 *
	 * @return never {@literal null}.
	 */
	CodecRegistry getCodecRegistry();
	@Override
	default CodecRegistry getCodecRegistry() {
		return getMongoDatabase().getCodecRegistry();
	}

	/**
	 * Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
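The reactive factory diff above shows the 3.0 contract returning `Mono<MongoDatabase>` where 2.2.x returned `MongoDatabase` directly. A small sketch against the 3.0 signature; the `SimpleReactiveMongoDatabaseFactory` implementation and connection details are assumptions:

```java
import com.mongodb.reactivestreams.client.MongoClients;
import com.mongodb.reactivestreams.client.MongoDatabase;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
import reactor.core.publisher.Mono;

class ReactiveFactorySketch {

    void printDatabaseName() {
        ReactiveMongoDatabaseFactory factory =
                new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://localhost:27017"), "test");

        // 3.0: database resolution is deferred and emitted through a Mono.
        Mono<MongoDatabase> database = factory.getMongoDatabase();
        database.map(MongoDatabase::getName).subscribe(System.out::println);
    }
}
```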
@@ -41,7 +41,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Mathieu Ouellet
* @since 2.2
*/
public class ReactiveMongoDatabaseUtils {
@@ -143,13 +142,14 @@ public class ReactiveMongoDatabaseUtils {
.flatMap(synchronizationManager -> {

return doGetSession(synchronizationManager, factory, sessionSynchronization) //
.flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
}) //
.onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory))
.switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
})
.onErrorResume(NoTransactionException.class,
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
}

private static Mono<MongoDatabase> getMongoDatabaseOrDefault(@Nullable String dbName,
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
ReactiveMongoDatabaseFactory factory) {
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
}

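Note: the rewritten operator chain above is what backs the utility's transaction-aware database lookup. A hedged sketch of calling it, assuming the class exposes a static getDatabase(factory) entry point returning Mono<MongoDatabase> (that signature is not shown in this hunk):

    import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
    import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
    import com.mongodb.reactivestreams.client.MongoDatabase;
    import reactor.core.publisher.Mono;

    class TransactionAwareLookup {

        Mono<MongoDatabase> lookup(ReactiveMongoDatabaseFactory factory) {
            // Participates in an ongoing transaction if one is synchronized with the
            // subscriber context, otherwise falls back to the plain database handle.
            return ReactiveMongoDatabaseUtils.getDatabase(factory);
        }
    }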
@@ -42,7 +42,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
*
* @param session the associated {@link ClientSession}. Can be {@literal null}.
* @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
* @param databaseFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
*/
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {

@@ -99,7 +99,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
* bound session is returned.
*
*
* @param session
* @return
*/

@@ -1,77 +0,0 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.util.Version;
import org.springframework.util.StringUtils;

import com.mongodb.MongoDriverInformation;

/**
* Class that exposes the SpringData MongoDB specific information like the current {@link Version} or
* {@link MongoDriverInformation driver information}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class SpringDataMongoDB {

private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);

private static final Version FALLBACK_VERSION = new Version(3);
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
.builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();

/**
* Obtain the SpringData MongoDB specific driver information.
*
* @return never {@literal null}.
*/
public static MongoDriverInformation driverInformation() {
return DRIVER_INFORMATION;
}

/**
* Fetches the "Implementation-Version" manifest attribute from the jar file.
* <p />
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
* version in all environments. In this case the current Major version is returned as a fallback.
*
* @return never {@literal null}.
*/
public static Version version() {

Package pkg = SpringDataMongoDB.class.getPackage();
String versionString = (pkg != null ? pkg.getImplementationVersion() : null);

if (!StringUtils.hasText(versionString)) {

LOGGER.debug("Unable to find Spring Data MongoDB version.");
return FALLBACK_VERSION;
}

try {
return Version.parse(versionString);
} catch (Exception e) {
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
}

return FALLBACK_VERSION;
}

}
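Note: although SpringDataMongoDB is removed on the 2.2 side, the configuration diff below shows how the 3.0 code consumes it when creating clients. A short illustration of that call (settings left at defaults):

    import org.springframework.data.mongodb.SpringDataMongoDB;
    import com.mongodb.MongoClientSettings;
    import com.mongodb.client.MongoClient;
    import com.mongodb.client.MongoClients;

    class ClientFactoryExample {

        MongoClient create() {
            MongoClientSettings settings = MongoClientSettings.builder().build();
            // Tags the connection metadata with the "spring-data" driver name and,
            // when resolvable, the Implementation-Version from the jar manifest.
            return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
        }
    }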
@@ -17,22 +17,17 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
|
||||
@@ -40,44 +35,40 @@ import com.mongodb.client.MongoClients;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoConfiguration
|
||||
*/
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
@Configuration
|
||||
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
* @return
|
||||
*/
|
||||
public MongoClient mongoClient() {
|
||||
return createMongoClient(mongoClientSettings());
|
||||
}
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @see #mongoDbFactory()
|
||||
* @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
|
||||
return new MongoTemplate(databaseFactory, converter);
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
|
||||
* {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDatabaseFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -100,32 +91,22 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given
|
||||
* {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
|
||||
|
||||
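Note: the change above turns mongoClient() back into an abstract method, so a 2.2 subclass must supply the client itself, while 3.0 derives it from mongoClientSettings()/configureClientSettings(..). A hedged sketch of such a subclass (database name and connection string are made up):

    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
    import com.mongodb.client.MongoClient;
    import com.mongodb.client.MongoClients;

    @Configuration
    class ApplicationMongoConfig extends AbstractMongoClientConfiguration {

        @Override
        public MongoClient mongoClient() {
            // 2.2 requires the client to be supplied by the subclass.
            return MongoClients.create("mongodb://localhost:27017");
        }

        @Override
        protected String getDatabaseName() {
            return "example";
        }
    }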
@@ -0,0 +1,121 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
|
||||
* <p />
|
||||
* <strong>INFO:</strong>In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
|
||||
* to {@link AbstractMongoClientConfiguration}.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Ryan Tenney
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoClientConfiguration
|
||||
* @deprecated since 2.2 in favor of {@link AbstractMongoClientConfiguration}.
|
||||
*/
|
||||
@Configuration
|
||||
@Deprecated
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
}
|
||||
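Note: a matching sketch for the re-added, deprecated legacy variant; per the Javadoc above, entities are scanned from the configuration class' package unless getMappingBasePackages() is overridden (host, port and database name are illustrative):

    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.config.AbstractMongoConfiguration;
    import com.mongodb.MongoClient;

    @Configuration
    class LegacyMongoConfig extends AbstractMongoConfiguration {

        @Override
        public MongoClient mongoClient() {
            // Legacy driver entry point; prefer AbstractMongoClientConfiguration where possible.
            return new MongoClient("localhost", 27017);
        }

        @Override
        protected String getDatabaseName() {
            return "example";
        }
    }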
@@ -18,19 +18,13 @@ package org.springframework.data.mongodb.config;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for reactive Spring Data MongoDB configuration using JavaConfig.
|
||||
@@ -40,33 +34,25 @@ import com.mongodb.reactivestreams.client.MongoClients;
|
||||
* @since 2.0
|
||||
* @see MongoConfigurationSupport
|
||||
*/
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
@Configuration
|
||||
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return createReactiveMongoClient(mongoClientSettings());
|
||||
}
|
||||
public abstract MongoClient reactiveMongoClient();
|
||||
|
||||
/**
|
||||
* Creates {@link ReactiveMongoOperations}.
|
||||
*
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MappingMongoConverter mongoConverter) {
|
||||
return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
|
||||
public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -74,7 +60,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
* {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
|
||||
*
|
||||
* @see #reactiveMongoClient()
|
||||
* @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
|
||||
* @see #reactiveMongoTemplate()
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
@@ -84,31 +70,21 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext()
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(reactiveMongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createReactiveMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
|
||||
|
||||
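Note: the reactive configuration follows the same pattern after this change, with reactiveMongoClient() abstract again on the 2.2 side. A hedged sketch (connection details are made up):

    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;
    import com.mongodb.reactivestreams.client.MongoClient;
    import com.mongodb.reactivestreams.client.MongoClients;

    @Configuration
    class ReactiveMongoConfig extends AbstractReactiveMongoConfiguration {

        @Override
        public MongoClient reactiveMongoClient() {
            // As with the blocking variant, 2.2 expects the subclass to create the client;
            // 3.0 builds it from mongoClientSettings() and only asks for overrides.
            return MongoClients.create("mongodb://localhost:27017");
        }

        @Override
        protected String getDatabaseName() {
            return "example";
        }
    }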
@@ -1,46 +0,0 @@
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;

import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import com.mongodb.ConnectionString;

/**
* Parse a {@link String} to a {@link com.mongodb.ConnectionString}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {

/*
* (non-Javadoc)
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
*/
@Override
public void setAsText(@Nullable String connectionString) {

if (!StringUtils.hasText(connectionString)) {
return;
}

setValue(new ConnectionString(connectionString));
}
}
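Note: the removed editor exists only on the 3.0 side of this comparison. Being a plain PropertyEditorSupport, its conversion can be exercised directly; a small illustration (standalone usage is illustrative, the editor is normally registered by the namespace parser):

    import org.springframework.data.mongodb.config.ConnectionStringPropertyEditor;
    import com.mongodb.ConnectionString;

    class ConnectionStringEditorExample {

        ConnectionString parse(String text) {
            ConnectionStringPropertyEditor editor = new ConnectionStringPropertyEditor();
            // Blank input leaves the value untouched; anything else becomes a ConnectionString.
            editor.setAsText(text);
            return (ConnectionString) editor.getValue();
        }
    }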
@@ -96,9 +96,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;
|
||||
|
||||
String autoIndexCreation = element.getAttribute("auto-index-creation");
|
||||
boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);
|
||||
|
||||
parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));
|
||||
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
@@ -202,11 +199,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
|
||||
return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
|
||||
}
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {
|
||||
|
||||
String ctxRef = element.getAttribute("mapping-context-ref");
|
||||
|
||||
@@ -234,8 +226,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
|
||||
}
|
||||
|
||||
mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);
|
||||
|
||||
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);
|
||||
|
||||
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
|
||||
|
||||
@@ -50,11 +50,10 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
|
||||
ParsingUtils.setPropertyValue(builder, element, "port", "port");
|
||||
ParsingUtils.setPropertyValue(builder, element, "host", "host");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credential", "credential");
|
||||
ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet");
|
||||
ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials");
|
||||
|
||||
MongoParsingUtils.parseMongoClientSettings(element, builder);
|
||||
MongoParsingUtils.parseMongoClientOptions(element, builder);
|
||||
MongoParsingUtils.parseReplicaSet(element, builder);
|
||||
|
||||
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;
|
||||
|
||||
@@ -63,34 +62,22 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
|
||||
parserContext.registerBeanComponent(mongoComponent);
|
||||
|
||||
BeanComponentDefinition connectionStringPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(connectionStringPropertyEditor);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder());
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils
|
||||
.getServerAddressPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(serverAddressPropertyEditor);
|
||||
|
||||
BeanComponentDefinition writeConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder());
|
||||
BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils
|
||||
.getWriteConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(writeConcernEditor);
|
||||
|
||||
BeanComponentDefinition readConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder());
|
||||
BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils
|
||||
.getReadPreferencePropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readPreferenceEditor);
|
||||
|
||||
BeanComponentDefinition credentialsEditor = helper
|
||||
.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor());
|
||||
BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils
|
||||
.getMongoCredentialPropertyEditor());
|
||||
parserContext.registerBeanComponent(credentialsEditor);
|
||||
|
||||
BeanComponentDefinition uuidRepresentationEditor = helper
|
||||
.getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder());
|
||||
parserContext.registerBeanComponent(uuidRepresentationEditor);
|
||||
|
||||
parserContext.popAndRegisterContainingComponent();
|
||||
|
||||
return mongoComponent.getBeanDefinition();
|
||||
|
||||
@@ -20,7 +20,6 @@ import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
@@ -32,15 +31,11 @@ import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingSt
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB to be extended for JavaConfiguration usage.
|
||||
*
|
||||
@@ -80,12 +75,11 @@ public abstract class MongoConfigurationSupport {
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
|
||||
throws ClassNotFoundException {
|
||||
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
@@ -94,30 +88,14 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the
|
||||
* {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link #mongoMappingContext()}.
|
||||
* Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* <p>
|
||||
* <strong>NOTE:</strong> Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB
|
||||
* native simple types and register custom {@link Converter converters}.
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
* {@link #mongoMappingContext()}. Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public MongoCustomConversions customConversions() {
|
||||
return MongoCustomConversions.create(this::configureConverters);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration hook for {@link MongoCustomConversions} creation.
|
||||
*
|
||||
* @param converterConfigurationAdapter never {@literal null}.
|
||||
* @since 2.3
|
||||
* @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs()
|
||||
* @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs()
|
||||
*/
|
||||
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
|
||||
|
||||
public CustomConversions customConversions() {
|
||||
return new MongoCustomConversions(Collections.emptyList());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -199,36 +177,11 @@ public abstract class MongoConfigurationSupport {
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal false} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
|
||||
* @return {@literal true} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. <br />
|
||||
* Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings mongoClientSettings() {
|
||||
|
||||
MongoClientSettings.Builder builder = MongoClientSettings.builder();
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
configureClientSettings(builder);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure {@link MongoClientSettings} via its {@link Builder} API.
|
||||
*
|
||||
* @param builder never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected void configureClientSettings(MongoClientSettings.Builder builder) {
|
||||
// customization hook
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
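Note: the hunk above flips the autoIndexCreation() default story: true in the 2.x line, false from 3.x on. A hedged sketch of pinning the behavior explicitly in a 2.2 configuration so an upgrade does not change semantics (client and database name are illustrative):

    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
    import com.mongodb.client.MongoClient;
    import com.mongodb.client.MongoClients;

    @Configuration
    class IndexAwareMongoConfig extends AbstractMongoClientConfiguration {

        @Override
        public MongoClient mongoClient() {
            return MongoClients.create("mongodb://localhost:27017");
        }

        @Override
        protected String getDatabaseName() {
            return "example";
        }

        @Override
        protected boolean autoIndexCreation() {
            // 2.2 defaults to true; opting out here matches the 3.x default announced above.
            return false;
        }
    }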
@@ -32,12 +32,14 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s.
|
||||
@@ -82,11 +84,10 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(SimpleMongoClientDatabaseFactory.class);
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
BeanDefinition mongoUri = getConnectionString(element, parserContext);
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
|
||||
if (mongoUri != null) {
|
||||
|
||||
@@ -96,8 +97,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-client-ref");
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
// Defaulting
|
||||
@@ -119,8 +119,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the
|
||||
* name under which the {@link com.mongodb.client.MongoClient} instance was registered under.
|
||||
* Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the
|
||||
* {@link Mongo} instance was registered under.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext must not be {@literal null}.
|
||||
@@ -136,7 +136,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes. <br />
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
@@ -145,19 +146,11 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
@Nullable
|
||||
private BeanDefinition getConnectionString(Element element, ParserContext parserContext) {
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
|
||||
String type = null;
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
|
||||
if (element.hasAttribute("client-uri")) {
|
||||
type = "client-uri";
|
||||
} else if (element.hasAttribute("connection-string")) {
|
||||
type = "connection-string";
|
||||
} else if (element.hasAttribute("uri")) {
|
||||
type = "uri";
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(type)) {
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -171,12 +164,16 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class);
|
||||
builder.addConstructorArgValue(element.getAttribute(type));
|
||||
Class<?> type = MongoClientURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -43,78 +43,60 @@ abstract class MongoParsingUtils {
|
||||
private MongoParsingUtils() {}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* Parses the mongo replica-set element.
|
||||
*
|
||||
* @param parserContext the parser context
|
||||
* @param element the mongo element
|
||||
* @param mongoBuilder the bean definition builder to populate
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
|
||||
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
|
||||
}
|
||||
|
||||
Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings");
|
||||
if (settingsElement == null) {
|
||||
/**
|
||||
* Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param mongoClientBuilder must not be {@literal null}.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
|
||||
Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options");
|
||||
|
||||
if (optionsElement == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoClientSettingsFactoryBean.class);
|
||||
.genericBeanDefinition(MongoClientOptionsFactoryBean.class);
|
||||
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
|
||||
"threadsAllowedToBlockForConnectionMultiplier");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
|
||||
// SocketSettings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize");
|
||||
|
||||
// Server Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency",
|
||||
"serverHeartbeatFrequencyMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency",
|
||||
"serverMinHeartbeatFrequencyMS");
|
||||
|
||||
// Cluster Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout",
|
||||
"clusterServerSelectionTimeoutMS");
|
||||
|
||||
// Connection Pool Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time",
|
||||
"poolMaxConnectionLifeTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time",
|
||||
"poolMaxConnectionIdleTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay",
|
||||
"poolMaintenanceInitialDelayMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency",
|
||||
"poolMaintenanceFrequencyMS");
|
||||
|
||||
// SSL Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed",
|
||||
"sslInvalidHostNameAllowed");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider");
|
||||
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -136,24 +118,6 @@ abstract class MongoParsingUtils {
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ReadConcernPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* One should only register one bean definition but want to have the convenience of using
|
||||
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
|
||||
@@ -161,7 +125,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() {
|
||||
|
||||
Map<String, String> customEditors = new ManagedMap<>();
|
||||
Map<String, String> customEditors = new ManagedMap<String, String>();
|
||||
customEditors.put("com.mongodb.ServerAddress[]",
|
||||
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
|
||||
|
||||
@@ -179,7 +143,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
|
||||
customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
@@ -205,41 +169,4 @@ abstract class MongoParsingUtils {
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadConcernLevel;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link ReadConcern}. If it is a well know {@link String} as identified by the
|
||||
* {@link ReadConcernLevel#fromString(String)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ReadConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readConcernString) {
|
||||
|
||||
if (!StringUtils.hasText(readConcernString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString)));
|
||||
}
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link UuidRepresentation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String value) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(UuidRepresentation.valueOf(value));
|
||||
}
|
||||
}
|
||||
@@ -34,7 +34,7 @@ import com.mongodb.WriteConcern;
public class WriteConcernPropertyEditor extends PropertyEditorSupport {

/**
* Parse a string to a {@link WriteConcern}.
* Parse a string to a List<ServerAddress>
*/
@Override
public void setAsText(@Nullable String writeConcernString) {
@@ -51,5 +51,6 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport {
// pass on the string to the constructor
setValue(new WriteConcern(writeConcernString));
}

}
}

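Note: the tail of setAsText(..) shown above falls back to the WriteConcern(String) constructor; well-known values are presumably resolved to the named constants earlier in the method, which is not visible in this hunk. Exercising the editor directly:

    import org.springframework.data.mongodb.config.WriteConcernPropertyEditor;
    import com.mongodb.WriteConcern;

    class WriteConcernEditorExample {

        WriteConcern parse(String text) {
            WriteConcernPropertyEditor editor = new WriteConcernPropertyEditor();
            // "MAJORITY" style names map to driver constants; anything else ends up in
            // the WriteConcern(String) constructor as shown in the diff above.
            editor.setAsText(text);
            return (WriteConcern) editor.getValue();
        }
    }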
@@ -1,227 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.bson.Document;

import org.springframework.data.geo.Point;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;

/**
 * Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite
 * to {@code $geoWithin}.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
class CountQuery {

	private Document source;

	private CountQuery(Document source) {
		this.source = source;
	}

	public static CountQuery of(Document source) {
		return new CountQuery(source);
	}

	/**
	 * Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query
	 * to be usable with {@code countDocuments()}.
	 *
	 * @return the query {@link Document} that can be used with {@code countDocuments()}.
	 */
	public Document toQueryDocument() {

		if (!requiresRewrite(source)) {
			return source;
		}

		Document target = new Document();

		for (Map.Entry<String, Object> entry : source.entrySet()) {

			if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {

				Document theValue = (Document) entry.getValue();
				target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
				continue;
			}

			if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {

				Collection<?> source = (Collection<?>) entry.getValue();

				target.put(entry.getKey(), rewriteCollection(source));
				continue;
			}

			if ("$and".equals(entry.getKey()) && target.containsKey("$and")) {
				// Expect $and to be processed with Document and createGeoWithin.
				continue;
			}

			target.put(entry.getKey(), entry.getValue());
		}

		return target;
	}

	/**
	 * @param valueToInspect
	 * @return {@code true} if the enclosing element needs to be rewritten.
	 */
	private boolean requiresRewrite(Object valueToInspect) {

		if (valueToInspect instanceof Document) {
			return requiresRewrite((Document) valueToInspect);
		}

		if (valueToInspect instanceof Collection) {
			return requiresRewrite((Collection) valueToInspect);
		}

		return false;
	}

	private boolean requiresRewrite(Collection<?> collection) {

		for (Object o : collection) {
			if (o instanceof Document && requiresRewrite((Document) o)) {
				return true;
			}
		}

		return false;
	}

	private boolean requiresRewrite(Document document) {

		if (containsNear(document)) {
			return true;
		}

		for (Object entry : document.values()) {

			if (requiresRewrite(entry)) {
				return true;
			}
		}

		return false;
	}

	private Collection<Object> rewriteCollection(Collection<?> source) {

		Collection<Object> rewrittenCollection = new ArrayList<>(source.size());

		for (Object item : source) {
			if (item instanceof Document && requiresRewrite(item)) {
				rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
			} else {
				rewrittenCollection.add(item);
			}
		}

		return rewrittenCollection;
	}

	/**
	 * Rewrite the near query for field {@code key} to {@code $geoWithin}.
	 *
	 * @param key the queried field.
	 * @param source source {@link Document}.
	 * @param $and potentially existing {@code $and} condition.
	 * @return the rewritten query {@link Document}.
	 */
	private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {

		boolean spheric = source.containsKey("$nearSphere");
		Object $near = spheric ? source.get("$nearSphere") : source.get("$near");

		Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
		List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
		Document $geoWithinMax = new Document("$geoWithin",
				new Document(spheric ? "$centerSphere" : "$center", $centerMax));

		if (!containsNearWithMinDistance(source)) {
			return new Document(key, $geoWithinMax);
		}

		Number minDistance = (Number) source.get("$minDistance");
		List<Object> $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance);
		Document $geoWithinMin = new Document("$geoWithin",
				new Document(spheric ? "$centerSphere" : "$center", $centerMin));

		List<Document> criteria = new ArrayList<>();

		if ($and != null) {
			if ($and instanceof Collection) {
				criteria.addAll((Collection) $and);
			} else {
				throw new IllegalArgumentException(
						"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
								+ $and);
			}
		}

		criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
		criteria.add(new Document(key, $geoWithinMax));
		return new Document("$and", criteria);
	}

	private static boolean containsNear(Document source) {
		return source.containsKey("$near") || source.containsKey("$nearSphere");
	}

	private static boolean containsNearWithMinDistance(Document source) {

		if (!containsNear(source)) {
			return false;
		}

		return source.containsKey("$minDistance");
	}

	private static Object toCenterCoordinates(Object value) {

		if (ObjectUtils.isArray(value)) {
			return value;
		}

		if (value instanceof Point) {
			return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
		}

		if (value instanceof Document && ((Document) value).containsKey("x")) {

			Document point = (Document) value;
			return Arrays.asList(point.get("x"), point.get("y"));
		}

		return value;
	}
}
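To make the rewrite concrete, here is a small sketch of the transformation the class above performs. CountQuery is package-private, so this only compiles from within org.springframework.data.mongodb.core; the field name, coordinates and distance are made up:

package org.springframework.data.mongodb.core;

import java.util.Arrays;

import org.bson.Document;

// Illustrative only: shows the $near -> $geoWithin rewrite applied for countDocuments().
class CountQueryRewriteSketch {

	public static void main(String[] args) {

		// { "location" : { "$near" : [ 40.0, 5.0 ], "$maxDistance" : 10.0 } }
		Document nearQuery = new Document("location",
				new Document("$near", Arrays.asList(40.0, 5.0)).append("$maxDistance", 10.0));

		Document rewritten = CountQuery.of(nearQuery).toQueryDocument();

		// { "location" : { "$geoWithin" : { "$center" : [ [ 40.0, 5.0 ], 10.0 ] } } }
		// which countDocuments() accepts, while it rejects $near/$nearSphere.
		System.out.println(rewritten.toJson());
	}
}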
@@ -32,7 +32,6 @@ import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
@@ -63,7 +62,6 @@ import com.mongodb.client.model.*;
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @author Roman Puchkovskiy
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -302,7 +300,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
@@ -424,52 +421,38 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (bulkOperationContext.getEventPublisher() == null) {
|
||||
return event;
|
||||
if (null != bulkOperationContext.getEventPublisher()) {
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
}
|
||||
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
return event;
|
||||
}
|
||||
|
||||
@@ -492,16 +475,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
|
||||
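For context, the events emitted by the maybeEmit* hooks in the hunks above are ordinary Spring application events. A hedged sketch of a listener that would observe them (listener name and logging are illustrative, not part of this changeset):

import org.springframework.context.ApplicationListener;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.stereotype.Component;

// Illustrative only: receives the AfterSaveEvent instances published by the bulk operations above.
@Component
class BulkAfterSaveLogger implements ApplicationListener<AfterSaveEvent<Object>> {

	@Override
	public void onApplicationEvent(AfterSaveEvent<Object> event) {
		System.out.println("saved into " + event.getCollectionName() + ": " + event.getSource());
	}
}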
@@ -21,7 +21,7 @@ import java.util.List;

import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -64,7 +64,7 @@ public class DefaultIndexOperations implements IndexOperations {
	 * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
	 */
	@Deprecated
	public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
	public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
		this(mongoDbFactory, collectionName, queryMapper, null);
	}

@@ -80,7 +80,7 @@ public class DefaultIndexOperations implements IndexOperations {
	 * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
	 */
	@Deprecated
	public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
	public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
			@Nullable Class<?> type) {

		Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");

@@ -15,13 +13,13 @@
 */
package org.springframework.data.mongodb.core;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;

/**
 * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}.
 * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
@@ -29,14 +29,14 @@ import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
 */
class DefaultIndexOperationsProvider implements IndexOperationsProvider {

	private final MongoDatabaseFactory mongoDbFactory;
	private final MongoDbFactory mongoDbFactory;
	private final QueryMapper mapper;

	/**
	 * @param mongoDbFactory must not be {@literal null}.
	 * @param mapper must not be {@literal null}.
	 */
	DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) {
	DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) {

		this.mongoDbFactory = mongoDbFactory;
		this.mapper = mapper;

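As a usage sketch, the provider above is what callers go through to obtain IndexOperations for a collection. Collection and index names are made up, and indexOps(String) is the 2.2-style signature assumed here:

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;

// Illustrative only: resolves IndexOperations through the provider and ensures an index.
class EnsureIndexSketch {

	static void ensureLastnameIndex(IndexOperationsProvider provider) {

		IndexOperations indexOps = provider.indexOps("people");
		indexOps.ensureIndex(new Index().on("lastname", Sort.Direction.ASC).named("lastname_idx"));
	}
}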
@@ -21,7 +21,6 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -44,7 +43,7 @@ import com.mongodb.client.MongoCollection;
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(where("firstname").is("luke"))
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
@@ -171,18 +170,6 @@ public interface ExecutableFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
@@ -304,21 +291,9 @@ public interface ExecutableFindOperation {
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -19,7 +19,6 @@ import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -31,7 +30,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -45,7 +44,6 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ExecutableMapReduceOperation {
|
||||
@@ -148,18 +146,6 @@ public interface ExecutableMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
@@ -120,18 +119,6 @@ public interface ExecutableRemoveOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemove<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemove}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,8 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
@@ -154,16 +151,13 @@ public interface ExecutableUpdateOperation {
|
||||
interface UpdateWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Set the {@link UpdateDefinition} to be applied.
|
||||
* Set the {@link Update} to be applied.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingUpdate}.
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
TerminatingUpdate<T> apply(UpdateDefinition update);
|
||||
TerminatingUpdate<T> apply(Update update);
|
||||
|
||||
/**
|
||||
* Specify {@code replacement} object.
|
||||
@@ -211,18 +205,6 @@ public interface ExecutableUpdateOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateWithUpdate<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateWithUpdate}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
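A short usage sketch of the contract above, combining matching(...) and apply(...); the template wiring, domain type and field names are illustrative:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.client.result.UpdateResult;

// Illustrative only: the fluent ExecutableUpdateOperation path described by the interface above.
class FluentUpdateSketch {

	static UpdateResult renameLuke(MongoTemplate template) {

		return template.update(Person.class) // Person is a hypothetical mapped domain type
				.matching(query(where("firstname").is("luke")))
				.apply(new Update().set("lastname", "skywalker"))
				.first();
	}

	static class Person {}
}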
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -67,7 +67,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
Query query;
|
||||
@Nullable UpdateDefinition update;
|
||||
@Nullable Update update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@@ -76,10 +76,10 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingUpdate<T> apply(UpdateDefinition update) {
|
||||
public TerminatingUpdate<T> apply(Update update) {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
|
||||
@@ -80,11 +80,7 @@ public class MappedDocument {
	}

	public Bson getIdFilter() {
		return new Document(ID_FIELD, document.get(ID_FIELD));
	}

	public Object get(String key) {
		return document.get(key);
		return Filters.eq(ID_FIELD, document.get(ID_FIELD));
	}

	public UpdateDefinition updateWithoutId() {

@@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.util.Assert;

import com.mongodb.client.MongoClient;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;

/**
@@ -34,13 +34,24 @@ import com.mongodb.client.MongoDatabase;
@ManagedResource(description = "Mongo Admin Operations")
public class MongoAdmin implements MongoAdminOperations {

	private final MongoClient mongoClient;
	private final Object mongoClient;

	/**
	 * @param mongoClient
	 * @deprecated since 2.2 in favor of {@link MongoAdmin#MongoAdmin(com.mongodb.client.MongoClient)}.
	 */
	@Deprecated
	public MongoAdmin(MongoClient mongoClient) {

		Assert.notNull(mongoClient, "MongoClient must not be null!");
		this.mongoClient = mongoClient;
	}

	/**
	 * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access.
	 * @since 2.2
	 */
	public MongoAdmin(MongoClient client) {
	public MongoAdmin(com.mongodb.client.MongoClient client) {

		Assert.notNull(client, "Client must not be null!");
		this.mongoClient = client;
@@ -77,6 +88,11 @@ public class MongoAdmin implements MongoAdminOperations {
	}

	MongoDatabase getDB(String databaseName) {
		return mongoClient.getDatabase(databaseName);

		if (mongoClient instanceof MongoClient) {
			return ((MongoClient) mongoClient).getDatabase(databaseName);
		}

		return ((com.mongodb.client.MongoClient) mongoClient).getDatabase(databaseName);
	}
}

@@ -16,72 +16,70 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.event.ClusterListener;
|
||||
|
||||
/**
|
||||
* Convenient factory for configuring MongoDB.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean} at
|
||||
* this time. However moving forward the {@link org.springframework.beans.factory.FactoryBean} will be
|
||||
* suitable to provide instances of {@link com.mongodb.client.MongoClient}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();
|
||||
|
||||
private @Nullable MongoClientSettings mongoClientSettings;
|
||||
private @Nullable MongoClientOptions mongoClientOptions;
|
||||
private @Nullable String host;
|
||||
private @Nullable Integer port;
|
||||
private @Nullable List<MongoCredential> credential = null;
|
||||
private @Nullable ConnectionString connectionString;
|
||||
private @Nullable String replicaSet = null;
|
||||
private List<ServerAddress> replicaSetSeeds = Collections.emptyList();
|
||||
private List<MongoCredential> credentials = Collections.emptyList();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}.
|
||||
* Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClientOptions
|
||||
*/
|
||||
public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) {
|
||||
this.mongoClientSettings = mongoClientOptions;
|
||||
public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) {
|
||||
this.mongoClientOptions = mongoClientOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of credentials to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param credential can be {@literal null}.
|
||||
* @param credentials can be {@literal null}.
|
||||
*/
|
||||
public void setCredential(@Nullable MongoCredential[] credential) {
|
||||
this.credential = Arrays.asList(credential);
|
||||
public void setCredentials(@Nullable MongoCredential[] credentials) {
|
||||
this.credentials = filterNonNullElementsAsList(credentials);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of {@link ServerAddress} to build up a replica set for.
|
||||
*
|
||||
* @param replicaSetSeeds can be {@literal null}.
|
||||
*/
|
||||
public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) {
|
||||
this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -102,14 +100,6 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
public void setConnectionString(@Nullable ConnectionString connectionString) {
|
||||
this.connectionString = connectionString;
|
||||
}
|
||||
|
||||
public void setReplicaSet(@Nullable String replicaSet) {
|
||||
this.replicaSet = replicaSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to use.
|
||||
*
|
||||
@@ -142,198 +132,12 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
*/
|
||||
@Override
|
||||
protected MongoClient createInstance() throws Exception {
|
||||
return createMongoClient(computeClientSetting());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link MongoClientSettings} based on configuration and priority (lower is better).
|
||||
* <ol>
|
||||
* <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
|
||||
* <li>{@link MongoClientFactoryBean#connectionString}</li>
|
||||
* <li>default {@link MongoClientSettings}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings computeClientSetting() {
|
||||
|
||||
if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
|
||||
throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
|
||||
if (mongoClientOptions == null) {
|
||||
mongoClientOptions = MongoClientOptions.builder().build();
|
||||
}
|
||||
|
||||
ConnectionString connectionString = this.connectionString != null ? this.connectionString
|
||||
: new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()),
|
||||
getOrDefault(port, "" + ServerAddress.defaultPort())));
|
||||
|
||||
Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString);
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
|
||||
if (mongoClientSettings != null) {
|
||||
|
||||
MongoClientSettings defaultSettings = MongoClientSettings.builder().build();
|
||||
|
||||
SslSettings sslSettings = mongoClientSettings.getSslSettings();
|
||||
ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings();
|
||||
ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings();
|
||||
SocketSettings socketSettings = mongoClientSettings.getSocketSettings();
|
||||
ServerSettings serverSettings = mongoClientSettings.getServerSettings();
|
||||
|
||||
builder = builder //
|
||||
.applicationName(computeSettingsValue(defaultSettings.getApplicationName(),
|
||||
mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) //
|
||||
.applyToSslSettings(settings -> {
|
||||
|
||||
applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled()));
|
||||
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed())));
|
||||
settings.context(sslSettings.getContext());
|
||||
}).applyToClusterSettings(settings -> {
|
||||
|
||||
applySettings(settings::hosts,
|
||||
computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList())));
|
||||
|
||||
applySettings(settings::requiredReplicaSetName,
|
||||
computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(),
|
||||
clusterSettings, connectionString.getRequiredReplicaSetName()));
|
||||
|
||||
applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold()));
|
||||
|
||||
applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getServerSelectionTimeout()));
|
||||
|
||||
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
List<ClusterListener> clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null);
|
||||
if (clusterListeners != null) {
|
||||
clusterListeners.forEach(settings::addClusterListener);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionIdleTime()));
|
||||
|
||||
applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionLifeTime()));
|
||||
|
||||
applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxWaitTime()));
|
||||
|
||||
applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue(
|
||||
(ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(settings::minSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMinConnectionPoolSize()));
|
||||
applySettings(settings::maxSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMaxConnectionPoolSize()));
|
||||
}) //
|
||||
.applyToSocketSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout()));
|
||||
|
||||
applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout()));
|
||||
applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
}) //
|
||||
.applyToServerSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, null));
|
||||
|
||||
applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency()));
|
||||
settings.applySettings(serverSettings);
|
||||
}) //
|
||||
.autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) //
|
||||
.codecRegistry(mongoClientSettings.getCodecRegistry()); //
|
||||
|
||||
applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(),
|
||||
mongoClientSettings.getReadConcern(), connectionString.getReadConcern()));
|
||||
applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(),
|
||||
mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern()));
|
||||
applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(),
|
||||
mongoClientSettings.getReadPreference(), connectionString.getReadPreference()));
|
||||
applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(),
|
||||
mongoClientSettings.getRetryReads(), connectionString.getRetryReads()));
|
||||
applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(),
|
||||
mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue()));
|
||||
applySettings(builder::uuidRepresentation,
|
||||
computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY));
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(credential)) {
|
||||
builder = builder.credential(credential.iterator().next());
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(replicaSet)) {
|
||||
builder.applyToClusterSettings((settings) -> {
|
||||
settings.requiredReplicaSetName(replicaSet);
|
||||
});
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private <T> void applySettings(Consumer<T> settingsBuilder, @Nullable T value) {
|
||||
|
||||
if (ObjectUtils.isEmpty(value)) {
|
||||
return;
|
||||
}
|
||||
settingsBuilder.accept(value);
|
||||
}
|
||||
|
||||
private <S, T> T computeSettingsValue(Function<S, T> function, S defaultValueHolder, S settingsValueHolder,
|
||||
@Nullable T connectionStringValue) {
|
||||
return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder),
|
||||
connectionStringValue);
|
||||
}
|
||||
|
||||
private <T> T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) {
|
||||
|
||||
boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings);
|
||||
boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString);
|
||||
|
||||
if (!fromSettingsIsDefault) {
|
||||
return fromSettings;
|
||||
}
|
||||
return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
|
||||
return createMongoClient();
|
||||
}
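The computeSettingsValue helpers above implement a simple three-way precedence: an explicitly customized MongoClientSettings value beats the connection string, which beats the driver default. A stand-alone sketch of that rule (class and example values are illustrative):

import java.util.Objects;

// Illustrative only: the precedence rule used above, reduced to a single generic helper.
final class SettingsPrecedence {

	static <T> T resolve(T defaultValue, T fromSettings, T fromConnectionString) {

		if (!Objects.equals(defaultValue, fromSettings)) {
			return fromSettings; // an explicitly customized MongoClientSettings value wins
		}
		if (!Objects.equals(defaultValue, fromConnectionString)) {
			return fromConnectionString; // otherwise a value from the connection string wins
		}
		return defaultValue; // otherwise fall back to the driver default
	}

	public static void main(String[] args) {
		// e.g. application name: default null, settings "orders-service", connection string "legacy-app"
		System.out.println(resolve(null, "orders-service", "legacy-app")); // orders-service
	}
}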
|
||||
|
||||
/*
|
||||
@@ -348,11 +152,43 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
}
|
||||
}
|
||||
|
||||
private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
private MongoClient createMongoClient() throws UnknownHostException {
|
||||
|
||||
if (!CollectionUtils.isEmpty(replicaSetSeeds)) {
|
||||
return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
private String getOrDefault(Object value, String defaultValue) {
|
||||
return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
|
||||
private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException {
|
||||
|
||||
ServerAddress defaultAddress = new ServerAddress();
|
||||
|
||||
return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(),
|
||||
port != null ? port.intValue() : defaultAddress.getPort());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given array as {@link List} with all {@literal null} elements removed.
|
||||
*
|
||||
 * @param elements the elements to filter, can be {@literal null}.
 * @return a new unmodifiable {@link List} from the given elements without {@literal null}s.
|
||||
*/
|
||||
private static <T> List<T> filterNonNullElementsAsList(@Nullable T[] elements) {
|
||||
|
||||
if (elements == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<T> candidateElements = new ArrayList<T>();
|
||||
|
||||
for (T element : elements) {
|
||||
if (element != null) {
|
||||
candidateElements.add(element);
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.unmodifiableList(candidateElements);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,338 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientOptions} instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean}.
|
||||
* However moving forward there will be a dedicated factory bean for {@link com.mongodb.MongoClientSettings}
|
||||
* replacing {@link MongoClientOptions}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClientOptions> {
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
// TODO: Mongo Driver 4 - use application name instead of description if not available
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
.getThreadsAllowedToBlockForConnectionMultiplier();
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime();
|
||||
private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime();
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
|
||||
// TODO: Mongo Driver 4 - check if available
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory();
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
|
||||
// TODO: Mongo Driver 4 - remove this option
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout();
|
||||
private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout();
|
||||
private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName();
|
||||
private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout();
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
// TODO: Mongo Driver 4 - deprecate that one and add application name
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of connections per host.
|
||||
*
|
||||
* @param minConnectionsPerHost
|
||||
*/
|
||||
public void setMinConnectionsPerHost(int minConnectionsPerHost) {
|
||||
this.minConnectionsPerHost = minConnectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Set the number of connections allowed per host. Threads block once the pool is exhausted. Default is 10.
 * The system property {@code MONGO.POOLSIZE} can override this value.
|
||||
*
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Set the multiplier for connectionsPerHost that caps the number of threads allowed to block waiting for a
 * connection. Default is 5. If connectionsPerHost is 10 and the multiplier is 5, up to 50 threads can wait for a
 * connection; any further thread fails with an exception.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Set the max wait time of a blocking thread for a connection. Default is 120000 ms (2 minutes).
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum idle time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionIdleTime
|
||||
*/
|
||||
public void setMaxConnectionIdleTime(int maxConnectionIdleTime) {
|
||||
this.maxConnectionIdleTime = maxConnectionIdleTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum life time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionLifeTime
|
||||
*/
|
||||
public void setMaxConnectionLifeTime(int maxConnectionLifeTime) {
|
||||
this.maxConnectionLifeTime = maxConnectionLifeTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout in milliseconds. 0 is default and infinite.
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout. 0 is default and infinite.
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
public void setSocketKeepAlive(boolean socketKeepAlive) {
|
||||
this.socketKeepAlive = socketKeepAlive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(@Nullable ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB
|
||||
* object.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(@Nullable WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketFactory
|
||||
*/
|
||||
public void setSocketFactory(@Nullable SocketFactory socketFactory) {
|
||||
this.socketFactory = socketFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the frequency that the driver will attempt to determine the current state of each server in the cluster.
|
||||
*
|
||||
* @param heartbeatFrequency
|
||||
*/
|
||||
public void setHeartbeatFrequency(int heartbeatFrequency) {
|
||||
this.heartbeatFrequency = heartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* In the event that the driver has to frequently re-check a server's availability, it will wait at least this long
|
||||
* since the previous check to avoid wasted effort.
|
||||
*
|
||||
* @param minHeartbeatFrequency
|
||||
*/
|
||||
public void setMinHeartbeatFrequency(int minHeartbeatFrequency) {
|
||||
this.minHeartbeatFrequency = minHeartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatConnectTimeout
|
||||
*/
|
||||
public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) {
|
||||
this.heartbeatConnectTimeout = heartbeatConnectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatSocketTimeout
|
||||
*/
|
||||
public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) {
|
||||
this.heartbeatSocketTimeout = heartbeatSocketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the name of the replica set.
|
||||
*
|
||||
* @param requiredReplicaSetName
|
||||
*/
|
||||
public void setRequiredReplicaSetName(String requiredReplicaSetName) {
|
||||
this.requiredReplicaSetName = requiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
 * This controls if the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here,
|
||||
* {@link SSLSocketFactory#getDefault()} will be used.
|
||||
*
|
||||
* @param sslSocketFactory
|
||||
*/
|
||||
public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
this.ssl = sslSocketFactory != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of
|
||||
* 30 sec will be used. A value of 0 means that it will timeout immediately if no server is available. A negative
|
||||
* value means to wait indefinitely.
|
||||
*
|
||||
* @param serverSelectionTimeout in msec.
|
||||
*/
|
||||
public void setServerSelectionTimeout(int serverSelectionTimeout) {
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link AutoEncryptionSettings} to be used.
|
||||
*
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
*/
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@Override
|
||||
protected MongoClientOptions createInstance() throws Exception {
|
||||
|
||||
SocketFactory socketFactoryToUse = ssl
|
||||
? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault())
|
||||
: this.socketFactory;
|
||||
|
||||
return MongoClientOptions.builder() //
|
||||
.alwaysUseMBeans(this.alwaysUseMBeans) //
|
||||
.connectionsPerHost(this.connectionsPerHost) //
|
||||
.connectTimeout(connectTimeout) //
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
.maxConnectionIdleTime(maxConnectionIdleTime) //
|
||||
.maxConnectionLifeTime(maxConnectionLifeTime) //
|
||||
.maxWaitTime(maxWaitTime) //
|
||||
.minConnectionsPerHost(minConnectionsPerHost) //
|
||||
.minHeartbeatFrequency(minHeartbeatFrequency) //
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 -
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientOptions.class;
|
||||
}
|
||||
}
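A minimal usage sketch for the factory bean above (assumption: Spring Java config; the bean name and property values are illustrative only, and only setters visible in the class are used).

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;

@Configuration
class MongoClientOptionsConfig {

	// Hypothetical wiring; values are illustrative only.
	@Bean
	public MongoClientOptionsFactoryBean mongoClientOptions() {

		MongoClientOptionsFactoryBean factoryBean = new MongoClientOptionsFactoryBean();
		factoryBean.setSsl(true); // no SSLSocketFactory configured, so SSLSocketFactory.getDefault() is used
		factoryBean.setServerSelectionTimeout(5000); // fail fast after 5 seconds instead of the 30 second default
		return factoryBean;
	}
}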
@@ -1,486 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private @Nullable Boolean retryReads = null;
|
||||
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private @Nullable Boolean retryWrites = null;
|
||||
|
||||
private @Nullable String applicationName = null;
|
||||
|
||||
private @Nullable UuidRepresentation uUidRepresentation = null;
|
||||
|
||||
// --> Socket Settings
|
||||
|
||||
private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings()
|
||||
.getConnectTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize();
|
||||
private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize();
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost();
|
||||
private List<ServerAddress> clusterHosts = Collections.emptyList();
|
||||
private @Nullable ClusterConnectionMode clusterConnectionMode = null;
|
||||
private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType();
|
||||
private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getRequiredReplicaSetName();
|
||||
private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getLocalThreshold(TimeUnit.MILLISECONDS);
|
||||
private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getServerSelectionTimeout(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize();
|
||||
private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize();
|
||||
private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxWaitTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled();
|
||||
private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed();
|
||||
private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled()
|
||||
? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName()
|
||||
: "";
|
||||
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) {
|
||||
this.socketConnectTimeoutMS = socketConnectTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReadTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketReadTimeoutMS(int socketReadTimeoutMS) {
|
||||
this.socketReadTimeoutMS = socketReadTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReceiveBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int)
|
||||
*/
|
||||
public void setSocketReceiveBufferSize(int socketReceiveBufferSize) {
|
||||
this.socketReceiveBufferSize = socketReceiveBufferSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketSendBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int)
|
||||
*/
|
||||
public void setSocketSendBufferSize(int socketSendBufferSize) {
|
||||
this.socketSendBufferSize = socketSendBufferSize;
|
||||
}
|
||||
|
||||
// --> Server Settings
|
||||
|
||||
private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
/**
|
||||
* @param serverHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) {
|
||||
this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverMinHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) {
|
||||
this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
/**
|
||||
* @param clusterSrvHost
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String)
|
||||
*/
|
||||
public void setClusterSrvHost(String clusterSrvHost) {
|
||||
this.clusterSrvHost = clusterSrvHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterHosts
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#hosts(List)
|
||||
*/
|
||||
public void setClusterHosts(ServerAddress[] clusterHosts) {
|
||||
this.clusterHosts = Arrays.asList(clusterHosts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClusterConnectionMode} to be used.
|
||||
*
|
||||
* @param clusterConnectionMode
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode)
|
||||
*/
|
||||
public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) {
|
||||
this.clusterConnectionMode = clusterConnectionMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param custerRequiredClusterType
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType)
|
||||
*/
|
||||
public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) {
|
||||
this.custerRequiredClusterType = custerRequiredClusterType;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterRequiredReplicaSetName
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String)
|
||||
*/
|
||||
public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) {
|
||||
this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterLocalThresholdMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) {
|
||||
this.clusterLocalThresholdMS = clusterLocalThresholdMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterServerSelectionTimeoutMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) {
|
||||
this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS;
|
||||
}
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
/**
|
||||
* @param poolMaxSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int)
|
||||
*/
|
||||
public void setPoolMaxSize(int poolMaxSize) {
|
||||
this.poolMaxSize = poolMaxSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMinSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int)
|
||||
*/
|
||||
public void setPoolMinSize(int poolMinSize) {
|
||||
this.poolMinSize = poolMinSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxWaitTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) {
|
||||
this.poolMaxWaitTimeMS = poolMaxWaitTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionLifeTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) {
|
||||
this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionIdleTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) {
|
||||
this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceInitialDelayMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) {
|
||||
this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) {
|
||||
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS;
|
||||
}
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
/**
|
||||
* @param sslEnabled
|
||||
* @see com.mongodb.connection.SslSettings.Builder#enabled(boolean)
|
||||
*/
|
||||
public void setSslEnabled(Boolean sslEnabled) {
|
||||
this.sslEnabled = sslEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslInvalidHostNameAllowed
|
||||
* @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean)
|
||||
*/
|
||||
public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) {
|
||||
this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslProvider
|
||||
* @see com.mongodb.connection.SslSettings.Builder#context(SSLContext)
|
||||
* @see SSLContext#getInstance(String)
|
||||
*/
|
||||
public void setSslProvider(String sslProvider) {
|
||||
this.sslProvider = sslProvider;
|
||||
}
|
||||
|
||||
// encryption and retry
|
||||
|
||||
/**
|
||||
* @param applicationName
|
||||
* @see MongoClientSettings.Builder#applicationName(String)
|
||||
*/
|
||||
public void setApplicationName(@Nullable String applicationName) {
|
||||
this.applicationName = applicationName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryReads
|
||||
* @see MongoClientSettings.Builder#retryReads(boolean)
|
||||
*/
|
||||
public void setRetryReads(@Nullable Boolean retryReads) {
|
||||
this.retryReads = retryReads;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readConcern
|
||||
* @see MongoClientSettings.Builder#readConcern(ReadConcern)
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param writeConcern
|
||||
* @see MongoClientSettings.Builder#writeConcern(WriteConcern)
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryWrites
|
||||
* @see MongoClientSettings.Builder#retryWrites(boolean)
|
||||
*/
|
||||
public void setRetryWrites(@Nullable Boolean retryWrites) {
|
||||
this.retryWrites = retryWrites;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readPreference
|
||||
* @see MongoClientSettings.Builder#readPreference(ReadPreference)
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param streamFactoryFactory
|
||||
* @see MongoClientSettings.Builder#streamFactoryFactory(StreamFactoryFactory)
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param codecRegistry
|
||||
* @see MongoClientSettings.Builder#codecRegistry(CodecRegistry)
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uUidRepresentation
|
||||
*/
|
||||
public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) {
|
||||
this.uUidRepresentation = uUidRepresentation;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings)
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() {
|
||||
|
||||
Builder builder = MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.applicationName(applicationName) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.applyToClusterSettings((settings) -> {
|
||||
|
||||
settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
if (clusterConnectionMode != null) {
|
||||
settings.mode(clusterConnectionMode);
|
||||
}
|
||||
settings.requiredReplicaSetName(clusterRequiredReplicaSetName);
|
||||
|
||||
if (!CollectionUtils.isEmpty(clusterHosts)) {
|
||||
settings.hosts(clusterHosts);
|
||||
}
|
||||
settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(clusterMaxWaitQueueSize);
|
||||
settings.requiredClusterType(custerRequiredClusterType);
|
||||
|
||||
if (StringUtils.hasText(clusterSrvHost)) {
|
||||
settings.srvHost(clusterSrvHost);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings((settings) -> {
|
||||
|
||||
settings.minSize(poolMinSize);
|
||||
settings.maxSize(poolMaxSize);
|
||||
settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(poolMaxWaitQueueSize);
|
||||
settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToServerSettings((settings) -> {
|
||||
|
||||
settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToSocketSettings((settings) -> {
|
||||
|
||||
settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.receiveBufferSize(socketReceiveBufferSize);
|
||||
settings.sendBufferSize(socketSendBufferSize);
|
||||
}) //
|
||||
.applyToSslSettings((settings) -> {
|
||||
|
||||
settings.enabled(sslEnabled);
|
||||
if (sslEnabled) {
|
||||
|
||||
settings.invalidHostNameAllowed(sslInvalidHostNameAllowed);
|
||||
try {
|
||||
settings.context(
|
||||
StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault());
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (streamFactoryFactory != null) {
|
||||
builder = builder.streamFactoryFactory(streamFactoryFactory);
|
||||
}
|
||||
if (retryReads != null) {
|
||||
builder = builder.retryReads(retryReads);
|
||||
}
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
|
||||
if (uUidRepresentation != null) {
|
||||
builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
}
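A usage sketch for the settings factory bean removed in this diff (it exists on the 3.0 line); the configuration class, host, and values are assumptions for illustration, and only setters shown above are used.

import com.mongodb.ServerAddress;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;

@Configuration
class MongoClientSettingsConfig {

	// Hypothetical wiring; values are illustrative only.
	@Bean
	public MongoClientSettingsFactoryBean mongoClientSettings() {

		MongoClientSettingsFactoryBean factoryBean = new MongoClientSettingsFactoryBean();
		factoryBean.setApplicationName("sample-app");
		factoryBean.setClusterHosts(new ServerAddress[] { new ServerAddress("localhost", 27017) });
		factoryBean.setRetryWrites(true);
		factoryBean.setPoolMaxSize(50);
		return factoryBean;
	}
}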
@@ -16,9 +16,10 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteConcernResult;
|
||||
import com.mongodb.WriteResult;
|
||||
|
||||
/**
|
||||
* Mongo-specific {@link DataIntegrityViolationException}.
|
||||
@@ -29,18 +30,18 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
|
||||
private static final long serialVersionUID = -186980521176764046L;
|
||||
|
||||
private final WriteConcernResult writeResult;
|
||||
private final WriteResult writeResult;
|
||||
private final MongoActionOperation actionOperation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}.
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}.
|
||||
* @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}.
|
||||
* @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}.
|
||||
*/
|
||||
public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
public MongoDataIntegrityViolationException(String message, WriteResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
|
||||
super(message);
|
||||
|
||||
@@ -52,11 +53,11 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link WriteConcernResult} that caused the exception.
|
||||
* Returns the {@link WriteResult} that caused the exception.
|
||||
*
|
||||
* @return the writeResult
|
||||
*/
|
||||
public WriteConcernResult getWriteResult() {
|
||||
public WriteResult getWriteResult() {
|
||||
return writeResult;
|
||||
}
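A sketch of how calling code might surface the write result carried by this exception before re-throwing; the MongoTemplate-based helper and the log output are assumptions, and it uses the WriteResult accessor from the incoming side of the hunk above.

import com.mongodb.WriteResult;
import org.springframework.data.mongodb.core.MongoDataIntegrityViolationException;
import org.springframework.data.mongodb.core.MongoTemplate;

class WriteResultReportingInsert {

	// Hypothetical helper: logs the result that triggered the violation, then re-throws.
	void insertReporting(MongoTemplate template, Object entity) {
		try {
			template.insert(entity);
		} catch (MongoDataIntegrityViolationException ex) {
			WriteResult result = ex.getWriteResult();
			System.err.println("Write rejected: " + result);
			throw ex;
		}
	}
}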
@@ -1,260 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with {@link com.mongodb.client.MongoClients}, defining common properties such as
* database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 3.0
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
*/
|
||||
public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
* {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDatabaseFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
}
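A hypothetical subclass sketch against the contract shown above (the shipped SimpleMongoClientDatabaseFactory plays this role in the framework); it only fills in the abstract hooks visible here plus the session factory method of MongoDatabaseFactory.

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoDatabase;
import org.springframework.data.mongodb.core.MongoDatabaseFactorySupport;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

class MyClientDatabaseFactory extends MongoDatabaseFactorySupport<MongoClient> {

	MyClientDatabaseFactory(MongoClient mongoClient, String databaseName) {
		// 'false': the client is handed in from outside, so destroy() must not close it
		super(mongoClient, databaseName, false, new MongoExceptionTranslator());
	}

	@Override
	protected MongoDatabase doGetMongoDatabase(String dbName) {
		return getMongoClient().getDatabase(dbName);
	}

	@Override
	public ClientSession getSession(ClientSessionOptions options) {
		return getMongoClient().startSession(options);
	}

	@Override
	protected void closeClient() {
		getMongoClient().close();
	}
}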
@@ -15,11 +15,26 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
|
||||
* defining common properties such as database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
@@ -27,16 +42,22 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 2.1
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
|
||||
* @see SimpleMongoDbFactory
|
||||
* @see SimpleMongoClientDbFactory
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
|
||||
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
@@ -45,6 +66,207 @@ public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySuppo
|
||||
*/
|
||||
protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return getDb(databaseName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDbFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return delegate.getLegacyDb();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -29,6 +29,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.MongoTransactionException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
@@ -36,6 +37,7 @@ import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.MongoServerException;
|
||||
@@ -110,6 +112,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
@@ -129,7 +135,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
|
||||
return new MongoTransactionException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
return new UncategorizedMongoDbException(ex.getMessage(), ex);
|
||||
}
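A sketch of routing a raw driver exception through the translator whose mapping rules are shown above; the helper class and collection handling are assumptions (Spring's templates do this internally).

import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

class TranslatingInsert {

	private final MongoExceptionTranslator translator = new MongoExceptionTranslator();

	// Driver exceptions become Spring DataAccessExceptions; untranslatable ones are re-thrown as-is.
	void insert(MongoCollection<Document> collection, Document document) {
		try {
			collection.insertOne(document);
		} catch (MongoException ex) {
			DataAccessException translated = translator.translateExceptionIfPossible(ex);
			throw translated != null ? translated : ex;
		}
	}
}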
@@ -27,7 +27,6 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -41,13 +40,13 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -58,7 +57,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <p/>
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -176,7 +175,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <p/>
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
@@ -212,7 +211,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <p/>
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -223,10 +222,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
|
||||
* {@link com.mongodb.client.FindIterable}.
|
||||
* {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
* be closed.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -239,10 +237,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
|
||||
* by a Mongo DB {@link com.mongodb.client.FindIterable}.
|
||||
* by a Mongo DB {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
* be closed.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -361,7 +358,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
IndexOperations indexOps(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level.
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level.
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
@@ -519,11 +516,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
* derived from the inputType of the aggregation.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed. The raw
* results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of
* the aggregation.
|
||||
* <p>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -538,11 +535,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
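A usage sketch for the collection-bound streaming variant above; the "people" collection name and the grouped field are assumptions, and Document is used as input and output type so no custom domain class is required.

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.util.CloseableIterator;

class AggregateStreamExample {

	// Streams grouped counts and makes sure the cursor-backed iterator is closed.
	void streamLastNameCounts(MongoOperations operations) {

		TypedAggregation<Document> aggregation = Aggregation.newAggregation(Document.class,
				Aggregation.group("lastName").count().as("total"));

		try (CloseableIterator<Document> iterator = operations.aggregateStream(aggregation, "people", Document.class)) {
			iterator.forEachRemaining(System.out::println);
		}
	}
}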
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
* of the inputCollection is derived from the inputType of the aggregation.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed. The raw
* results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is
* derived from the inputType of the aggregation.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -556,10 +553,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed. The raw
* results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -575,10 +572,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class.
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link Cursor} that needs to be closed. The raw
* results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -886,15 +883,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -902,16 +896,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -920,18 +911,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -940,19 +928,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
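// A minimal usage sketch of this overload, assuming a MongoTemplate `template` and a mapped Person class
// with `name` and `visits` fields (these names are assumptions, not part of the interface):
Query query = Query.query(Criteria.where("name").is("luke"));
Update update = new Update().inc("visits", 1);

// returnNew(true) hands back the document as it looks after the update; the default returns the pre-update state.
Person person = template.findAndModify(query, update,
        FindAndModifyOptions.options().returnNew(true), Person.class, "star-wars");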
/**
|
||||
@@ -1301,111 +1286,99 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
*
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);
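// A minimal sketch of the upsert semantics described above, assuming a MongoTemplate `template` and a mapped
// Person class (both hypothetical): the inserted document combines the query document and the update document.
Query query = Query.query(Criteria.where("ssn").is(1111));
Update update = new Update().set("firstName", "Luke").set("lastName", "Skywalker");

// If nothing matches, { ssn: 1111, firstName: "Luke", lastName: "Skywalker" } is inserted.
UpdateResult result = template.upsert(query, update, Person.class, "star-wars");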
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1413,34 +1386,27 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1448,16 +1414,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
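// A minimal sketch contrasting the two update flavours, assuming a MongoTemplate `template` and a mapped
// Person class (both hypothetical): updateFirst modifies at most one document, updateMulti modifies every match.
Query query = Query.query(Criteria.where("lastName").is("Skywalker"));
Update update = new Update().set("affiliation", "Rebel Alliance");

UpdateResult first = template.updateFirst(query, update, Person.class); // modified count is at most 1
UpdateResult all = template.updateMulti(query, update, Person.class);   // modifies all matching documents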
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,763 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
|
||||
/**
|
||||
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
|
||||
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
|
||||
* for {@literal count}, {@literal remove}, and other methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class QueryOperations {
|
||||
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final EntityOperations entityOperations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
private final CodecRegistryProvider codecRegistryProvider;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final AggregationUtil aggregationUtil;
|
||||
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link QueryOperations}.
|
||||
*
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param updateMapper must not be {@literal null}.
|
||||
* @param entityOperations must not be {@literal null}.
|
||||
* @param propertyOperations must not be {@literal null}.
|
||||
* @param codecRegistryProvider must not be {@literal null}.
|
||||
*/
|
||||
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
|
||||
PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.entityOperations = entityOperations;
|
||||
this.propertyOperations = propertyOperations;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.mappingContext = queryMapper.getMappingContext();
|
||||
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
QueryContext createQueryContext(Query query) {
|
||||
return new QueryContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DistinctQueryContext}.
|
||||
*/
|
||||
DistinctQueryContext distinctQueryContext(Query query, String fieldName) {
|
||||
return new DistinctQueryContext(query, fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link CountContext}.
|
||||
*/
|
||||
CountContext countQueryContext(Query query) {
|
||||
return new CountContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting multiple documents.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, true, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param replacement the {@link MappedDocument mapped replacement} document.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
|
||||
return new UpdateContext(replacement, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing all matching documents.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DeleteContext}.
|
||||
*/
|
||||
DeleteContext deleteQueryContext(Query query) {
|
||||
return new DeleteContext(query, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing only the first matching document.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DeleteContext}.
|
||||
*/
|
||||
DeleteContext deleteSingleContext(Query query) {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class QueryContext {
|
||||
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
* plain {@link Document}).
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
private QueryContext(@Nullable Query query) {
|
||||
this.query = query != null ? query : new Query();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Query getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document getQueryObject() {
|
||||
return query.getQueryObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the
|
||||
* given {@literal domainType}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable Class<T> domainType,
|
||||
Function<Class<T>, MongoPersistentEntity<?>> entityLookup) {
|
||||
return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> entity) {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
|
||||
} else {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields,
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getSortObject() sort} option.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present, or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param consumer must not be {@literal null}.
|
||||
*/
|
||||
void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
|
||||
getCollation(domainType).ifPresent(consumer::accept);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
|
||||
|
||||
return entityOperations.forType(domainType).getCollation(query) //
|
||||
.map(Collation::toMongoCollation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DistinctQueryContext extends QueryContext {
|
||||
|
||||
private final String fieldName;
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param fieldName must not be {@literal null}.
|
||||
*/
|
||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||
|
||||
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapped field name to project to.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return getMappedFields(entity).keySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the MongoDB native representation of the given {@literal type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
<T> Class<T> getDriverCompatibleClass(Class<T> type) {
|
||||
|
||||
return codecRegistryProvider.getCodecFor(type) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the most specific read target type based on the user {@literal requestedTargetType} and the property type
|
||||
* based on meta information extracted from the {@literal domainType}.
|
||||
*
|
||||
* @param requestedTargetType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
|
||||
|
||||
Class<?> conversionTargetType = requestedTargetType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor UserType over property one
|
||||
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
}
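// An illustrative sketch of the target-type resolution above, assuming a hypothetical Person domain type with an
// Integer `age` property and a DistinctQueryContext `distinctContext` created for the field name "age":
Class<?> narrowed = distinctContext.getMostSpecificConversionTargetType(Object.class, Person.class);
// -> Integer.class, because the property type is more specific and assignable to the requested Object.class
Class<?> kept = distinctContext.getMostSpecificConversionTargetType(String.class, Person.class);
// -> String.class, because Integer is not assignable to String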
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class CountContext extends QueryContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
CountContext(@Nullable Query query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType) {
|
||||
return getCountOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
|
||||
|
||||
CountOptions options = new CountOptions();
|
||||
Query query = getQuery();
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
|
||||
String hint = query.getHint();
|
||||
if (BsonUtils.isJsonDocument(hint)) {
|
||||
options.hint(BsonUtils.parse(hint, codecRegistryProvider));
|
||||
} else {
|
||||
options.hintString(hint);
|
||||
}
|
||||
}
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DeleteContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
|
||||
*/
|
||||
DeleteContext(@Nullable Query query, boolean multi) {
|
||||
|
||||
super(query);
|
||||
this.multi = multi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
|
||||
return getDeleteOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents shall be deleted.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
|
||||
*/
|
||||
class UpdateContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
private final boolean upsert;
|
||||
private final @Nullable UpdateDefinition update;
|
||||
private final @Nullable MappedDocument mappedDocument;
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
|
||||
this(update, new BasicQuery(query), multi, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) {
|
||||
|
||||
super(query);
|
||||
|
||||
this.multi = multi;
|
||||
this.upsert = upsert;
|
||||
this.update = update;
|
||||
this.mappedDocument = null;
|
||||
}
|
||||
|
||||
UpdateContext(MappedDocument update, boolean upsert) {
|
||||
|
||||
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
|
||||
this.multi = false;
|
||||
this.upsert = upsert;
|
||||
this.mappedDocument = update;
|
||||
this.update = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType) {
|
||||
return getUpdateOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType, @Nullable Consumer<UpdateOptions> callback) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update != null && update.hasArrayFilters()) {
|
||||
options
|
||||
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType) {
|
||||
return getReplaceOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType, @Nullable Consumer<ReplaceOptions> callback) {
|
||||
|
||||
UpdateOptions updateOptions = getUpdateOptions(domainType);
|
||||
|
||||
ReplaceOptions options = new ReplaceOptions();
|
||||
options.collation(updateOptions.getCollation());
|
||||
options.upsert(updateOptions.isUpsert());
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> domainType) {
|
||||
|
||||
Document mappedQuery = super.getMappedQuery(domainType);
|
||||
|
||||
if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
|
||||
mappedQuery.put("$isolated", 1);
|
||||
}
|
||||
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {
|
||||
|
||||
Document shardKeySource = existing != null ? existing
|
||||
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
|
||||
|
||||
Document filterWithShardKey = new Document(filter);
|
||||
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
|
||||
|
||||
return filterWithShardKey;
|
||||
}
|
||||
|
||||
boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {
|
||||
|
||||
return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
|
||||
&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entity's
* {@literal id} property.
|
||||
* @since 3.0
|
||||
*/
|
||||
private boolean shardedById(MongoPersistentEntity<?> domainType) {
|
||||
|
||||
ShardKey shardKey = domainType.getShardKey();
|
||||
if (shardKey.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String key = shardKey.getPropertyNames().iterator().next();
|
||||
if ("_id".equals(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = domainType.getIdProperty();
|
||||
return idProperty != null && idProperty.getName().equals(key);
|
||||
}
|
||||
|
||||
Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
|
||||
return getMappedShardKey(entity).keySet();
|
||||
}
|
||||
|
||||
Document getMappedShardKey(MongoPersistentEntity<?> entity) {
|
||||
return mappedShardKey.computeIfAbsent(entity.getType(),
|
||||
key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
|
||||
|
||||
AggregationOperationContext context = domainType != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped update {@link Document}.
|
||||
*
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (update != null) {
|
||||
return update instanceof MappedUpdate ? update.getUpdateObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
}
|
||||
return mappedDocument.getDocument();
|
||||
}
|
||||
|
||||
/**
|
||||
* Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to the update if not
* already done in the actual {@link UpdateDefinition}.
|
||||
*
|
||||
* @param persistentEntity can be {@literal null}.
|
||||
*/
|
||||
void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
|
||||
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the update holds an aggregation pipeline.
|
||||
*/
|
||||
boolean isAggregationUpdate() {
|
||||
return update instanceof AggregationUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents should be updated.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
}
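// A minimal sketch of how a template-level count could use the contexts above, assuming `queryOperations`,
// `query`, `entity`, `domainType` and a driver `collection` are available in the calling code (all hypothetical):
CountContext countContext = queryOperations.countQueryContext(query);
Document mappedQuery = countContext.getMappedQuery(entity);
CountOptions options = countContext.getCountOptions(domainType);
long count = collection.countDocuments(mappedQuery, options);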
@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
@@ -39,14 +38,13 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(where("firstname").is("luke"))
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Juergen Zimmermann
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFindOperation {
|
||||
@@ -146,18 +144,6 @@ public interface ReactiveFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
@@ -273,21 +259,9 @@ public interface ReactiveFindOperation {
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
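// A minimal sketch of the fluent API shown in the Javadoc above, assuming a ReactiveMongoTemplate `template`
// and static imports of Criteria.where and Query.query (hypothetical setup, not defined in this interface):
Flux<Jedi> luke = template.query(Human.class)
        .inCollection("star-wars")
        .as(Jedi.class)
        .matching(query(where("firstname").is("luke")))
        .all();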
/**
|
||||
|
||||
@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -31,7 +30,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -147,18 +146,6 @@ public interface ReactiveMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -22,7 +22,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
|
||||
@@ -15,17 +15,192 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @deprecated since 3.0 - Use {@link MongoClientSettingsFactoryBean} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends MongoClientSettingsFactoryBean {
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private List<MongoCredential> credentialList = new ArrayList<>();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
|
||||
private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
|
||||
private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
|
||||
private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
|
||||
private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
|
||||
private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern}.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadConcern}.
|
||||
*
|
||||
* @param readConcern
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the List of {@link MongoCredential}s.
|
||||
*
|
||||
* @param credentialList must not be {@literal null}.
|
||||
*/
|
||||
public void setCredentialList(List<MongoCredential> credentialList) {
|
||||
|
||||
Assert.notNull(credentialList, "CredendialList must not be null!");
|
||||
|
||||
this.credentialList.addAll(credentialList);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the {@link MongoCredential} to the list of credentials.
|
||||
*
|
||||
* @param mongoCredential must not be {@literal null}.
|
||||
*/
|
||||
public void addMongoCredential(MongoCredential mongoCredential) {
|
||||
|
||||
Assert.notNull(mongoCredential, "MongoCredential must not be null!");
|
||||
|
||||
this.credentialList.add(mongoCredential);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link StreamFactoryFactory}.
|
||||
*
|
||||
* @param streamFactoryFactory
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link CodecRegistry}.
|
||||
*
|
||||
* @param codecRegistry
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClusterSettings}.
|
||||
*
|
||||
* @param clusterSettings
|
||||
*/
|
||||
public void setClusterSettings(ClusterSettings clusterSettings) {
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SocketSettings}.
|
||||
*
|
||||
* @param socketSettings
|
||||
*/
|
||||
public void setSocketSettings(SocketSettings socketSettings) {
|
||||
this.socketSettings = socketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the heartbeat {@link SocketSettings}.
|
||||
*
|
||||
* @param heartbeatSocketSettings
|
||||
*/
|
||||
public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
|
||||
this.heartbeatSocketSettings = heartbeatSocketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ConnectionPoolSettings}.
|
||||
*
|
||||
* @param connectionPoolSettings
|
||||
*/
|
||||
public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
|
||||
this.connectionPoolSettings = connectionPoolSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ServerSettings}.
|
||||
*
|
||||
* @param serverSettings
|
||||
*/
|
||||
public void setServerSettings(ServerSettings serverSettings) {
|
||||
this.serverSettings = serverSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SslSettings}.
|
||||
*
|
||||
* @param sslSettings
|
||||
*/
|
||||
public void setSslSettings(SslSettings sslSettings) {
|
||||
this.sslSettings = sslSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() throws Exception {
|
||||
|
||||
return MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.credentialList(credentialList) //
|
||||
.streamFactoryFactory(streamFactoryFactory) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.clusterSettings(clusterSettings) //
|
||||
.socketSettings(socketSettings) //
|
||||
.heartbeatSocketSettings(heartbeatSocketSettings) //
|
||||
.connectionPoolSettings(connectionPoolSettings) //
|
||||
.serverSettings(serverSettings) //
|
||||
.sslSettings(sslSettings) //
|
||||
.build();
|
||||
}
|
||||
}
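// A minimal configuration sketch for this (deprecated) factory bean, assuming it is declared inside a Spring
// @Configuration class (hypothetical):
@Bean
public ReactiveMongoClientSettingsFactoryBean reactiveMongoClientSettings() {

    ReactiveMongoClientSettingsFactoryBean factoryBean = new ReactiveMongoClientSettingsFactoryBean();
    factoryBean.setReadPreference(ReadPreference.secondaryPreferred());
    factoryBean.setWriteConcern(WriteConcern.MAJORITY);
    return factoryBean;
}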
@@ -29,7 +29,6 @@ import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -40,7 +39,6 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -66,7 +64,6 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
* @see Flux
|
||||
* @see Mono
|
||||
@@ -299,7 +296,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param collectionName name of the collection.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
Mono<MongoCollection<Document>> getCollection(String collectionName);
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
@@ -681,14 +678,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -696,15 +690,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -713,16 +704,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -731,17 +719,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
|
||||
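As a usage sketch for the signatures above: the widened 3.0 parameter accepts both a classic Update and an aggregation-pipeline AggregationUpdate through the same overload. The domain type, fields, and values are illustrative assumptions.

import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

// Sketch: both update flavours go through the widened 3.0 overload; 'Person' is an assumed domain type.
class FindAndModifySketch {

	Mono<Person> incrementVisits(ReactiveMongoOperations operations) {
		return operations.findAndModify(
				Query.query(Criteria.where("lastname").is("Doe")),
				new Update().inc("visits", 1),                                // classic Update
				FindAndModifyOptions.options().returnNew(true),
				Person.class);
	}

	Mono<Person> archiveViaPipeline(ReactiveMongoOperations operations) {
		return operations.findAndModify(
				Query.query(Criteria.where("lastname").is("Doe")),
				AggregationUpdate.update().set("status").toValue("archived"), // pipeline update, 3.0 signature only
				FindAndModifyOptions.options().returnNew(true),
				Person.class);
	}

	static class Person {}
}
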
/**
|
||||
@@ -1164,111 +1149,99 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
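A short sketch of the upsert contract documented above (the domain type, field names, and collection mapping are assumptions): the document is created from query plus update when nothing matches, and incremented otherwise.

import com.mongodb.client.result.UpdateResult;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

// Sketch: creates the counter document on first use, increments it afterwards. 'Counter' is an assumed type.
class UpsertSketch {

	Mono<UpdateResult> incrementOrCreate(ReactiveMongoOperations operations, String counterId) {
		return operations.upsert(
				Query.query(Criteria.where("_id").is(counterId)),
				new Update().inc("value", 1),
				Counter.class);
	}

	static class Counter {}
}
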
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1276,34 +1249,27 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1311,16 +1277,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
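A sketch contrasting the multi-document semantics documented above: updateMulti touches every matching document, while updateFirst stops at the first match. Domain type and fields are assumptions.

import com.mongodb.client.result.UpdateResult;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

// Sketch: updateMulti touches every matching document; updateFirst would stop after one match.
class UpdateMultiSketch {

	Mono<UpdateResult> deactivateExpired(ReactiveMongoOperations operations) {
		return operations.updateMulti(
				Query.query(Criteria.where("status").is("expired")),
				Update.update("active", false),
				Person.class); // assumed domain type
	}

	static class Person {}
}
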
/**
|
||||
* Remove the given object from the collection by id.
|
||||
|
||||
File diff suppressed because it is too large.
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;
@@ -107,18 +106,6 @@ public interface ReactiveRemoveOperation {
	 * @throws IllegalArgumentException if query is {@literal null}.
	 */
	TerminatingRemove<T> matching(Query query);

	/**
	 * Set the filter {@link CriteriaDefinition criteria} to be used.
	 *
	 * @param criteria must not be {@literal null}.
	 * @return new instance of {@link TerminatingRemove}.
	 * @throws IllegalArgumentException if query is {@literal null}.
	 * @since 3.0
	 */
	default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
		return matching(Query.query(criteria));
	}
}

interface ReactiveRemove<T> extends RemoveWithCollection<T> {}

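The matching(CriteriaDefinition) shortcut removed above (3.0 only) trims fluent removals to roughly the following sketch; the template and domain type are assumptions.

import com.mongodb.client.result.DeleteResult;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import reactor.core.publisher.Mono;

// Sketch: the 3.0 matching(CriteriaDefinition) shortcut wraps the criteria into a Query internally.
class FluentRemoveSketch {

	Mono<DeleteResult> removeInactive(ReactiveMongoTemplate template) {
		return template.remove(Person.class)
				.matching(Criteria.where("active").is(false)) // 2.2 requires matching(Query.query(...))
				.all();
	}

	static class Person {}
}
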
@@ -17,11 +17,8 @@ package org.springframework.data.mongodb.core;

import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;

import com.mongodb.client.result.UpdateResult;

@@ -121,16 +118,13 @@ public interface ReactiveUpdateOperation {
	interface UpdateWithUpdate<T> {

		/**
		 * Set the {@link UpdateDefinition} to be applied.
		 * Set the {@link org.springframework.data.mongodb.core.query.Update} to be applied.
		 *
		 * @param update must not be {@literal null}.
		 * @return new instance of {@link TerminatingUpdate}. Never {@literal null}.
		 * @throws IllegalArgumentException if update is {@literal null}.
		 * @since 3.0
		 * @see Update
		 * @see AggregationUpdate
		 */
		TerminatingUpdate<T> apply(UpdateDefinition update);
		TerminatingUpdate<T> apply(org.springframework.data.mongodb.core.query.Update update);

		/**
		 * Specify {@code replacement} object.
@@ -172,18 +166,6 @@ public interface ReactiveUpdateOperation {
		 * @throws IllegalArgumentException if query is {@literal null}.
		 */
		UpdateWithUpdate<T> matching(Query query);

		/**
		 * Set the filter {@link CriteriaDefinition criteria} to be used.
		 *
		 * @param criteria must not be {@literal null}.
		 * @return new instance of {@link UpdateWithUpdate}.
		 * @throws IllegalArgumentException if query is {@literal null}.
		 * @since 3.0
		 */
		default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
			return matching(Query.query(criteria));
		}
	}

	/**

@@ -63,7 +63,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
	@NonNull ReactiveMongoTemplate template;
	@NonNull Class<?> domainType;
	Query query;
	org.springframework.data.mongodb.core.query.UpdateDefinition update;
	org.springframework.data.mongodb.core.query.Update update;
	@Nullable String collection;
	@Nullable FindAndModifyOptions findAndModifyOptions;
	@Nullable FindAndReplaceOptions findAndReplaceOptions;
@@ -72,10 +72,10 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
	 * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.Update)
	 */
	@Override
	public TerminatingUpdate<T> apply(org.springframework.data.mongodb.core.query.UpdateDefinition update) {
	public TerminatingUpdate<T> apply(org.springframework.data.mongodb.core.query.Update update) {

		Assert.notNull(update, "Update must not be null!");

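A sketch of the fluent update flow whose apply(...) parameter is widened above; because Update implements UpdateDefinition in 3.0, the same call site compiles against both signatures. The template and domain type are assumptions.

import com.mongodb.client.result.UpdateResult;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

// Sketch: Update implements UpdateDefinition in 3.0, so this call compiles against both signatures.
class FluentUpdateSketch {

	Mono<UpdateResult> renameCity(ReactiveMongoTemplate template) {
		return template.update(Person.class)
				.matching(Query.query(Criteria.where("city").is("Cologne")))
				.apply(Update.update("city", "Koeln"))
				.all();
	}

	static class Person {}
}
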
@@ -21,6 +21,7 @@ import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
import org.springframework.data.mongodb.core.script.NamedMongoScript;
import org.springframework.lang.Nullable;

import com.mongodb.DB;

/**
 * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions.

@@ -15,7 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
@@ -25,10 +30,8 @@ import com.mongodb.client.MongoDatabase;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @deprecated since 3.0, use {@link SimpleMongoClientDatabaseFactory} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class SimpleMongoClientDbFactory extends SimpleMongoClientDatabaseFactory {
|
||||
public class SimpleMongoClientDbFactory extends MongoDbFactorySupport<MongoClient> implements DisposableBean {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDbFactory} instance for the given {@code connectionString}.
|
||||
@@ -69,6 +72,45 @@ public class SimpleMongoClientDbFactory extends SimpleMongoClientDatabaseFactory
|
||||
* @param mongoInstanceCreated
|
||||
*/
|
||||
private SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated);
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
|
||||
throw new UnsupportedOperationException(String.format(
|
||||
"%s does not support legacy DBObject API! Please consider using SimpleMongoDbFactory for that purpose.",
|
||||
MongoClient.class));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return getMongoClient().startSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient()
|
||||
*/
|
||||
@Override
|
||||
protected void closeClient() {
|
||||
getMongoClient().close();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
protected MongoDatabase doGetMongoDatabase(String dbName) {
|
||||
return getMongoClient().getDatabase(dbName);
|
||||
}
|
||||
}
|
||||
|
||||
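For context, a minimal sketch of constructing the 2.2-era factory deprecated above; the connection string is an illustrative assumption, and 3.0 code would construct SimpleMongoClientDatabaseFactory the same way.

import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;

// Sketch: 2.2-style factory creation; deprecated in 3.0 in favor of SimpleMongoClientDatabaseFactory.
class DbFactorySketch {

	SimpleMongoClientDbFactory newFactory() {
		return new SimpleMongoClientDbFactory("mongodb://127.0.0.1:27017/test"); // assumed connection string
	}
}
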
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,63 +18,66 @@ package org.springframework.data.mongodb.core;
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
* @author George Moraitis
|
||||
* @author Mark Paluch
|
||||
* @deprecated since 2.2 in favor of {@link SimpleMongoClientDbFactory}.
|
||||
*/
|
||||
public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySupport<MongoClient>
|
||||
implements DisposableBean {
|
||||
@Deprecated
|
||||
public class SimpleMongoDbFactory extends MongoDbFactorySupport<MongoClient> implements DisposableBean {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance for the given {@code connectionString}.
|
||||
* Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClientURI}.
|
||||
*
|
||||
* @param connectionString connection coordinates for a database connection. Must contain a database name and must not
|
||||
* be {@literal null} or empty.
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/connection-string/">MongoDB Connection String reference</a>
|
||||
* @param uri coordinates for a database connection. Must contain a database name and must not be {@literal null}.
|
||||
* @since 1.7
|
||||
*/
|
||||
public SimpleMongoClientDatabaseFactory(String connectionString) {
|
||||
this(new ConnectionString(connectionString));
|
||||
public SimpleMongoDbFactory(MongoClientURI uri) {
|
||||
this(new MongoClient(uri), uri.getDatabase(), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param connectionString connection coordinates for a database connection. Must contain also a database name and not
|
||||
* be {@literal null}.
|
||||
*/
|
||||
public SimpleMongoClientDatabaseFactory(ConnectionString connectionString) {
|
||||
this(MongoClients.create(connectionString), connectionString.getDatabase(), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
* Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @since 1.7
|
||||
*/
|
||||
public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName) {
|
||||
public SimpleMongoDbFactory(MongoClient mongoClient, String databaseName) {
|
||||
this(mongoClient, databaseName, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoClient
|
||||
* @param databaseName
|
||||
* @param mongoInstanceCreated
|
||||
* @since 1.7
|
||||
*/
|
||||
SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
private SimpleMongoDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return getMongoClient().getDB(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
@@ -84,7 +87,7 @@ public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySuppor
|
||||
return getMongoClient().startSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient()
|
||||
*/
|
||||
@@ -93,7 +96,7 @@ public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySuppor
|
||||
getMongoClient().close();
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core;
|
||||
import lombok.Value;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
@@ -42,7 +41,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
*/
|
||||
public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory {
|
||||
@@ -101,7 +99,7 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(databaseName);
|
||||
}
|
||||
|
||||
@@ -109,16 +107,12 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty.");
|
||||
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
MongoDatabase db = mongo.getDatabase(dbName);
|
||||
|
||||
return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
|
||||
});
|
||||
MongoDatabase db = mongo.getDatabase(dbName);
|
||||
return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
|
||||
}
|
||||
|
||||
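A small sketch against the 3.0 signature above, where the database handle is resolved inside the reactive flow; 'factory' is an assumed, already-configured ReactiveMongoDatabaseFactory.

import com.mongodb.reactivestreams.client.MongoDatabase;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import reactor.core.publisher.Mono;

// Sketch against the 3.0 signature: the database handle is deferred into a Mono.
class DatabaseNameSketch {

	Mono<String> databaseName(ReactiveMongoDatabaseFactory factory) {
		return factory.getMongoDatabase().map(MongoDatabase::getName);
		// 2.2 returns MongoDatabase directly: factory.getMongoDatabase().getName()
	}
}
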
/**
|
||||
@@ -141,15 +135,6 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
|
||||
*/
|
||||
@Override
|
||||
public CodecRegistry getCodecRegistry() {
|
||||
return this.mongo.getDatabase(databaseName).getCodecRegistry();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
@@ -186,8 +171,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
|
||||
return delegate.getMongoDatabase().map(this::decorateDatabase);
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return decorateDatabase(delegate.getMongoDatabase());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -195,8 +180,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return delegate.getMongoDatabase(dbName).map(this::decorateDatabase);
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return decorateDatabase(delegate.getMongoDatabase(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -208,15 +193,6 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
|
||||
*/
|
||||
@Override
|
||||
public CodecRegistry getCodecRegistry() {
|
||||
return delegate.getCodecRegistry();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
@@ -101,14 +100,14 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return value;
|
||||
}
|
||||
|
||||
protected List<Object> append(Object value, Expand expandList) {
|
||||
protected List<Object> append(Object value) {
|
||||
|
||||
if (this.value instanceof List) {
|
||||
|
||||
List<Object> clone = new ArrayList<Object>((List) this.value);
|
||||
|
||||
if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) {
|
||||
clone.addAll((Collection<?>) value);
|
||||
if (value instanceof List) {
|
||||
clone.addAll((List) value);
|
||||
} else {
|
||||
clone.add(value);
|
||||
}
|
||||
@@ -118,17 +117,6 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return Arrays.asList(this.value, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Expand a nested list of values to single entries or keep the list.
|
||||
*/
|
||||
protected enum Expand {
|
||||
EXPAND_VALUES, KEEP_SOURCE
|
||||
}
|
||||
|
||||
protected List<Object> append(Object value) {
|
||||
return append(value, Expand.EXPAND_VALUES);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected java.util.Map<String, Object> append(String key, Object value) {
|
||||
|
||||
|
||||
@@ -1,200 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Adds new fields to documents. {@code $addFields} outputs documents that contain all existing fields from the input
|
||||
* documents and newly added fields.
|
||||
*
|
||||
* <pre class="code">
|
||||
* AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/addFields/">MongoDB Aggregation
|
||||
* Framework: $addFields</a>
|
||||
*/
|
||||
public class AddFieldsOperation extends DocumentEnhancingOperation {
|
||||
|
||||
/**
|
||||
* Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private AddFieldsOperation(Map<Object, Object> source) {
|
||||
super(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance of {@link AddFieldsOperation}
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param value can be {@literal null}.
|
||||
*/
|
||||
public AddFieldsOperation(Object field, @Nullable Object value) {
|
||||
this(Collections.singletonMap(field, value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}.
|
||||
*
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public static AddFieldsOperationBuilder builder() {
|
||||
return new AddFieldsOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate another field to add.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public static ValueAppender addField(String field) {
|
||||
return new AddFieldsOperationBuilder().addField(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the value for a specific field to the operation.
|
||||
*
|
||||
* @param field the target field to add.
|
||||
* @param value the value to assign.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
public AddFieldsOperation addField(Object field, Object value) {
|
||||
|
||||
LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
|
||||
target.put(field, value);
|
||||
|
||||
return new AddFieldsOperation(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate additional fields to add.
|
||||
*
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public AddFieldsOperationBuilder and() {
|
||||
return new AddFieldsOperationBuilder(getValueMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String mongoOperator() {
|
||||
return "$addFields";
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class AddFieldsOperationBuilder {
|
||||
|
||||
private final Map<Object, Object> valueMap;
|
||||
|
||||
private AddFieldsOperationBuilder() {
|
||||
this.valueMap = new LinkedHashMap<>();
|
||||
}
|
||||
|
||||
private AddFieldsOperationBuilder(Map<Object, Object> source) {
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
}
|
||||
|
||||
public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) {
|
||||
return addField(field).withValue(value);
|
||||
}
|
||||
|
||||
public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) {
|
||||
return addField(field).withValueOf(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the field to add.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link ValueAppender}.
|
||||
*/
|
||||
public ValueAppender addField(String field) {
|
||||
|
||||
return new ValueAppender() {
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValue(Object value) {
|
||||
|
||||
valueMap.put(field, value);
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValueOf(Object value) {
|
||||
|
||||
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values) {
|
||||
|
||||
valueMap.put(field, new ExpressionProjection(operation, values));
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public AddFieldsOperation build() {
|
||||
return new AddFieldsOperation(valueMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface ValueAppender {
|
||||
|
||||
/**
|
||||
* Define the value to assign as is.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValue(@Nullable Object value);
|
||||
|
||||
/**
|
||||
* Define the value to assign. Plain {@link String} values are treated as {@link Field field references}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValueOf(Object value);
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code $add}.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
}
|
||||
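As a usage sketch for the $addFields stage removed above: the builder methods are the ones declared in the class, the field names follow its Javadoc, and the enclosing pipeline is an illustrative assumption.

import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

// Sketch: $addFields stage built via the 3.0 builder shown above.
class AddFieldsSketch {

	Aggregation pipeline() {
		AddFieldsOperation addFields = AddFieldsOperation.builder()
				.addFieldWithValue("totalHomework", "A+")
				.addFieldWithValueOf("averageScore", "score") // plain String is treated as a field reference
				.build();

		return Aggregation.newAggregation(addFields);
	}
}
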
@@ -23,11 +23,9 @@ import java.util.List;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
@@ -117,17 +115,6 @@ public class Aggregation {
|
||||
return new Aggregation(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param operations can be {@literal empty} but must not be {@literal null}.
|
||||
* @return new instance of {@link AggregationUpdate}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public static AggregationUpdate newUpdate(AggregationOperation... operations) {
|
||||
return AggregationUpdate.from(Arrays.asList(operations));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are
|
||||
* supported in MongoDB version 2.6+.
|
||||
@@ -194,24 +181,20 @@ public class Aggregation {
|
||||
/**
|
||||
* Creates a new {@link Aggregation} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @param aggregationOperations must not be {@literal null}.
|
||||
* @param aggregationOperations must not be {@literal null} or empty.
|
||||
* @param options must not be {@literal null} or empty.
|
||||
*/
|
||||
protected Aggregation(List<AggregationOperation> aggregationOperations, AggregationOptions options) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided");
|
||||
Assert.notNull(options, "AggregationOptions must not be null!");
|
||||
|
||||
// check $out/$merge is the last operation if it exists
|
||||
// check $out is the last operation if it exists
|
||||
for (AggregationOperation aggregationOperation : aggregationOperations) {
|
||||
|
||||
if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation, aggregationOperations)) {
|
||||
throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline.");
|
||||
}
|
||||
|
||||
if (aggregationOperation instanceof MergeOperation && !isLast(aggregationOperation, aggregationOperations)) {
|
||||
throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline.");
|
||||
}
|
||||
}
|
||||
|
||||
this.operations = aggregationOperations;
|
||||
@@ -241,20 +224,6 @@ public class Aggregation {
|
||||
return "_id";
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
|
||||
* <p/>
|
||||
* Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is
|
||||
* an alias for {@code $addFields}.
|
||||
*
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
* @see AddFieldsOperation
|
||||
* @since 3.0
|
||||
*/
|
||||
public static AddFieldsOperationBuilder addFields() {
|
||||
return AddFieldsOperation.builder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
@@ -514,30 +483,6 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}.
|
||||
*
|
||||
* @return new instance of {@link MergeOperationBuilder}.
|
||||
* @see MergeOperation
|
||||
* @since 3.0
|
||||
*/
|
||||
public static MergeOperationBuilder merge() {
|
||||
return MergeOperation.builder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in
|
||||
* the pipeline.
|
||||
@@ -657,26 +602,6 @@ public class Aggregation {
|
||||
return new CountOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored
|
||||
* within the document itself.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
|
||||
* .then(RedactOperation.PRUNE) //
|
||||
* .otherwise(RedactOperation.DESCEND));
|
||||
* </pre>
|
||||
*
|
||||
* @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
|
||||
* {@literal $$KEEP}. Must not be {@literal null}.
|
||||
* @return new instance of {@link RedactOperation}. Never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public static RedactOperation redact(AggregationExpression condition) {
|
||||
return new RedactOperation(condition);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for the given field names.
|
||||
*
|
||||
@@ -699,6 +624,19 @@ public class Aggregation {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
|
||||
@@ -20,6 +20,7 @@ import java.lang.reflect.Method;
import java.util.Arrays;

import org.bson.Document;

import org.springframework.beans.BeanUtils;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
import org.springframework.lang.Nullable;
@@ -102,16 +103,4 @@ public interface AggregationOperationContext {
				.map(PropertyDescriptor::getName) //
				.toArray(String[]::new));
	}

	/**
	 * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for
	 * its existence. Typically the {@link AggregationOperationContext} fails when referencing unknown fields, those that
	 * are not present in one of the previous stages or the input source, throughout the pipeline.
	 *
	 * @return a more relaxed {@link AggregationOperationContext}.
	 * @since 3.0
	 */
	default AggregationOperationContext continueOnMissingFieldReference() {
		return this;
	}
}

@@ -15,7 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
@@ -23,6 +22,8 @@ import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support
|
||||
* aggregation options can be found in the MongoDB reference documentation
|
||||
@@ -44,14 +45,12 @@ public class AggregationOptions {
|
||||
private static final String ALLOW_DISK_USE = "allowDiskUse";
|
||||
private static final String COLLATION = "collation";
|
||||
private static final String COMMENT = "comment";
|
||||
private static final String MAX_TIME = "maxTimeMS";
|
||||
|
||||
private final boolean allowDiskUse;
|
||||
private final boolean explain;
|
||||
private final Optional<Document> cursor;
|
||||
private final Optional<Collation> collation;
|
||||
private final Optional<String> comment;
|
||||
private Duration maxTime = Duration.ZERO;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
@@ -113,7 +112,7 @@ public class AggregationOptions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationOptions} given {@link Document} containing aggregation options.
|
||||
* Creates new {@link AggregationOptions} given {@link DBObject} containing aggregation options.
|
||||
*
|
||||
* @param document must not be {@literal null}.
|
||||
* @return the {@link AggregationOptions}.
|
||||
@@ -130,11 +129,7 @@ public class AggregationOptions {
|
||||
: null;
|
||||
String comment = document.getString(COMMENT);
|
||||
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
if (document.containsKey(MAX_TIME)) {
|
||||
options.maxTime = Duration.ofMillis(document.getLong(MAX_TIME));
|
||||
}
|
||||
return options;
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -211,14 +206,6 @@ public class AggregationOptions {
|
||||
return comment;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior.
|
||||
* @since 3.0
|
||||
*/
|
||||
public Duration getMaxTime() {
|
||||
return maxTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
@@ -246,10 +233,6 @@ public class AggregationOptions {
|
||||
collation.map(Collation::toDocument).ifPresent(val -> result.append(COLLATION, val));
|
||||
}
|
||||
|
||||
if (hasExecutionTimeLimit() && !result.containsKey(MAX_TIME)) {
|
||||
result.append(MAX_TIME, maxTime.toMillis());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -268,21 +251,9 @@ public class AggregationOptions {
|
||||
collation.ifPresent(val -> document.append(COLLATION, val.toDocument()));
|
||||
comment.ifPresent(val -> document.append(COMMENT, val));
|
||||
|
||||
if (hasExecutionTimeLimit()) {
|
||||
document.append(MAX_TIME, maxTime.toMillis());
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
public boolean hasExecutionTimeLimit() {
|
||||
return !maxTime.isZero() && !maxTime.isNegative();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@@ -308,7 +279,6 @@ public class AggregationOptions {
|
||||
private @Nullable Document cursor;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable String comment;
|
||||
private @Nullable Duration maxTime;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
@@ -385,33 +355,13 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the time limit for processing.
|
||||
*
|
||||
* @param maxTime {@link Duration#ZERO} is used for the default unbounded behavior. {@link Duration#isNegative()
|
||||
* Negative} values will be ignored.
|
||||
* @return this.
|
||||
* @since 3.0
|
||||
*/
|
||||
public Builder maxTime(@Nullable Duration maxTime) {
|
||||
|
||||
this.maxTime = maxTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
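A short sketch of the 3.0-only maxTime option removed above, set through the options builder; the five-second limit is an illustrative assumption.

import java.time.Duration;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;

// Sketch: maxTime exists only on the 3.0 side of this diff.
class AggregationOptionsSketch {

	AggregationOptions options() {
		return Aggregation.newAggregationOptions()
				.allowDiskUse(true)
				.maxTime(Duration.ofSeconds(5))
				.build();
	}
}
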
/**
|
||||
* Returns a new {@link AggregationOptions} instance with the given configuration.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public AggregationOptions build() {
|
||||
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
if (maxTime != null) {
|
||||
options.maxTime = maxTime;
|
||||
}
|
||||
|
||||
return options;
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
}
|
||||
}
|
||||
}
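For context, a minimal usage sketch of the options class above (illustrative only, not part of the diff; imports, a configured mongoTemplate, and the "employees" collection are assumptions, and builder()/maxTime(..) reflect the 3.0 side of this diff):

// Illustrative sketch: assumes the 3.0-style builder and maxTime shown above.
AggregationOptions options = AggregationOptions.builder()
        .allowDiskUse(true)
        .maxTime(Duration.ofSeconds(30)) // Duration.ZERO keeps the unbounded default, negative values are ignored
        .build();

Aggregation aggregation = Aggregation.newAggregation(
        Aggregation.match(Criteria.where("active").is(true)),
        Aggregation.group("department").count().as("total"))
    .withOptions(options);

AggregationResults<Document> results = mongoTemplate.aggregate(aggregation, "employees", Document.class);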
|
||||
|
||||
@@ -1,325 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Abstraction for {@code db.collection.update()} using an aggregation pipeline. Aggregation pipeline updates use a more
* expressive update statement that supports conditional updates based on current field values, as well as updating one
* field using the value of other fields.
|
||||
*
|
||||
* <pre class="code">
|
||||
* AggregationUpdate update = AggregationUpdate.update().set("average")
|
||||
* .toValue(ArithmeticOperators.valueOf("tests").avg()).set("grade")
|
||||
* .toValue(ConditionalOperators
|
||||
* .switchCases(CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"),
|
||||
* CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"),
|
||||
* CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"),
|
||||
* CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D"))
|
||||
* .defaultTo("F"));
|
||||
* </pre>
|
||||
*
|
||||
* The above sample is equivalent to the JSON update statement:
|
||||
*
|
||||
* <pre class="code">
|
||||
* db.collection.update(
|
||||
* { },
|
||||
* [
|
||||
* { $set: { average : { $avg: "$tests" } } },
|
||||
* { $set: { grade: { $switch: {
|
||||
* branches: [
|
||||
* { case: { $gte: [ "$average", 90 ] }, then: "A" },
|
||||
* { case: { $gte: [ "$average", 80 ] }, then: "B" },
|
||||
* { case: { $gte: [ "$average", 70 ] }, then: "C" },
|
||||
* { case: { $gte: [ "$average", 60 ] }, then: "D" }
|
||||
* ],
|
||||
* default: "F"
|
||||
* } } } }
|
||||
* ],
|
||||
* { multi: true }
|
||||
* )
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline">MongoDB
|
||||
* Reference Documentation</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
public class AggregationUpdate extends Aggregation implements UpdateDefinition {
|
||||
|
||||
private boolean isolated = false;
|
||||
private final Set<String> keysTouched = new HashSet<>();
|
||||
|
||||
/**
|
||||
* Create new {@link AggregationUpdate}.
|
||||
*/
|
||||
protected AggregationUpdate() {
|
||||
this(new ArrayList<>());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link AggregationUpdate} with the given aggregation pipeline to apply.
|
||||
*
|
||||
* @param pipeline must not be {@literal null}.
|
||||
*/
|
||||
protected AggregationUpdate(List<AggregationOperation> pipeline) {
|
||||
|
||||
super(pipeline);
|
||||
|
||||
for (AggregationOperation operation : pipeline) {
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
((FieldsExposingAggregationOperation) operation).getFields().forEach(it -> {
|
||||
keysTouched.add(it.getName());
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start defining the update pipeline to execute.
|
||||
*
|
||||
* @return new instance of {@link AggregationUpdate}.
|
||||
*/
|
||||
public static AggregationUpdate update() {
|
||||
return new AggregationUpdate();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new AggregationUpdate from the given {@link AggregationOperation}s.
|
||||
*
|
||||
* @return new instance of {@link AggregationUpdate}.
|
||||
*/
|
||||
public static AggregationUpdate from(List<AggregationOperation> pipeline) {
|
||||
return new AggregationUpdate(pipeline);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input
|
||||
* documents and newly added fields.
|
||||
*
|
||||
* @param setOperation must not be {@literal null}.
|
||||
* @return this.
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/set/">$set Aggregation Reference</a>
|
||||
*/
|
||||
public AggregationUpdate set(SetOperation setOperation) {
|
||||
|
||||
Assert.notNull(setOperation, "SetOperation must not be null!");
|
||||
|
||||
setOperation.getFields().forEach(it -> {
|
||||
keysTouched.add(it.getName());
|
||||
});
|
||||
operations.add(setOperation);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@code $unset} removes/excludes fields from documents.
|
||||
*
|
||||
* @param unsetOperation must not be {@literal null}.
|
||||
* @return this.
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/unset/">$unset Aggregation
|
||||
* Reference</a>
|
||||
*/
|
||||
public AggregationUpdate unset(UnsetOperation unsetOperation) {
|
||||
|
||||
Assert.notNull(unsetOperation, "UnsetOperation must not be null!");
|
||||
|
||||
operations.add(unsetOperation);
|
||||
keysTouched.addAll(unsetOperation.removedFieldNames());
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@code $replaceWith} replaces the input document with the specified document. The operation replaces all existing
|
||||
* fields in the input document, including the <strong>_id</strong> field.
|
||||
*
|
||||
* @param replaceWithOperation must not be {@literal null}.
|
||||
* @return this.
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/replaceWith/">$replaceWith Aggregation
|
||||
* Reference</a>
|
||||
*/
|
||||
public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) {
|
||||
|
||||
Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null!");
|
||||
operations.add(replaceWithOperation);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@code $replaceWith} replaces the input document with the value.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public AggregationUpdate replaceWith(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return replaceWith(ReplaceWithOperation.replaceWithValue(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API variant for {@code $set} adding a single {@link SetOperation pipeline operation} every time. To update
|
||||
* multiple fields within one {@link SetOperation} use {@link #set(SetOperation)}.
|
||||
*
|
||||
* @param key must not be {@literal null}.
|
||||
* @return new instance of {@link SetValueAppender}.
|
||||
* @see #set(SetOperation)
|
||||
*/
|
||||
public SetValueAppender set(String key) {
|
||||
|
||||
Assert.notNull(key, "Key must not be null!");
|
||||
|
||||
return new SetValueAppender() {
|
||||
|
||||
@Override
|
||||
public AggregationUpdate toValue(@Nullable Object value) {
|
||||
return set(SetOperation.builder().set(key).toValue(value));
|
||||
}
|
||||
|
||||
@Override
|
||||
public AggregationUpdate toValueOf(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return set(SetOperation.builder().set(key).toValueOf(value));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Short for {@link #unset(UnsetOperation)}.
|
||||
*
|
||||
* @param keys the fields to remove. Must not be {@literal null} or contain {@literal null} elements.
* @return this.
|
||||
*/
|
||||
public AggregationUpdate unset(String... keys) {
|
||||
|
||||
Assert.notNull(keys, "Keys must not be null!");
|
||||
Assert.noNullElements(keys, "Keys must not contain null elements.");
|
||||
|
||||
return unset(new UnsetOperation(Arrays.stream(keys).map(Fields::field).collect(Collectors.toList())));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prevents a write operation that affects <strong>multiple</strong> documents from yielding to other reads or writes
|
||||
* once the first document is written. <br />
|
||||
* Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public AggregationUpdate isolated() {
|
||||
|
||||
isolated = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#isIsolated()
|
||||
*/
|
||||
@Override
|
||||
public Boolean isIsolated() {
|
||||
return isolated;
|
||||
}
|
||||
|
||||
/*
|
||||
* Returns an update document containing the update pipeline.
|
||||
* The resulting document needs to be unwrapped to be used with update operations.
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getUpdateObject()
|
||||
*/
|
||||
@Override
|
||||
public Document getUpdateObject() {
|
||||
return new Document("", toPipeline(Aggregation.DEFAULT_CONTEXT));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#modifies(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public boolean modifies(String key) {
|
||||
return keysTouched.contains(key);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#inc(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void inc(String key) {
|
||||
set(new SetOperation(key, ArithmeticOperators.valueOf(key).add(1)));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
StringJoiner joiner = new StringJoiner(",\n", "[\n", "\n]");
|
||||
toPipeline(Aggregation.DEFAULT_CONTEXT).stream().map(SerializationUtils::serializeToJsonSafely)
|
||||
.forEach(joiner::add);
|
||||
return joiner.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fluent API AggregationUpdate builder.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface SetValueAppender {
|
||||
|
||||
/**
|
||||
* Define the target value as is.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
AggregationUpdate toValue(@Nullable Object value);
|
||||
|
||||
/**
|
||||
* Define the target value as value, an {@link AggregationExpression} or a {@link Field} reference.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
AggregationUpdate toValueOf(Object value);
|
||||
}
|
||||
}
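A short usage sketch for the class above (illustrative only, not part of the diff; imports, a configured mongoTemplate, and the "orders" collection and its fields are assumptions):

// Illustrative sketch: combines $set and $unset stages and runs them as a multi-document update.
AggregationUpdate update = AggregationUpdate.update()
        .set("status").toValue("active")
        .unset("temporaryFlag");

mongoTemplate.updateMulti(Query.query(Criteria.where("status").is("pending")), update, "orders");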
|
||||
@@ -511,27 +511,6 @@ public class ArithmeticOperators {
|
||||
: AccumulatorOperators.StdDevSamp.stdDevSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal
|
||||
* place.
|
||||
*
|
||||
* @return new instance of {@link Round}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public Round round() {
|
||||
return usesFieldRef() ? Round.roundValueOf(fieldReference) : Round.roundValueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that rounds a number to a specified decimal place.
|
||||
*
|
||||
* @return new instance of {@link Round}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public Round roundToPlace(int place) {
|
||||
return round().place(place);
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
@@ -1443,100 +1422,4 @@ public class ArithmeticOperators {
|
||||
return new Trunc(value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Round} rounds a number to a whole integer or to a specified decimal place.
|
||||
* <ul>
|
||||
* <li>If {@link Round#place(int)} resolves to a positive integer, {@code $round} rounds to the given decimal
|
||||
* places.</li>
|
||||
* <li>If {@link Round#place(int)} resolves to a negative integer, {@code $round} rounds to the left of the
|
||||
* decimal.</li>
|
||||
* <li>If {@link Round#place(int)} resolves to a zero, {@code $round} rounds using the first digit to the right of the
|
||||
* decimal.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class Round extends AbstractAggregationExpression {
|
||||
|
||||
private Round(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round the value of the field that resolves to an integer, double, decimal, or long.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public static Round roundValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new Round(Collections.singletonList(Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Round the outcome of the given expression that resolves to an integer, double, decimal, or long.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public static Round roundValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Round(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Round the given numeric (integer, double, decimal, or long) value.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public static Round round(Number value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Round(Collections.singletonList(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* The place to round to. Can be between -20 and 100, exclusive.
|
||||
*
|
||||
* @param place
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public Round place(int place) {
|
||||
return new Round(append(place));
|
||||
}
|
||||
|
||||
/**
|
||||
* The place to round to defined by an expression that resolves to an integer between -20 and 100, exclusive.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public Round placeOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Round(append(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* The place to round to, defined via a field reference that resolves to an integer between -20 and 100,
* exclusive.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Round}.
|
||||
*/
|
||||
public Round placeOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
return new Round(append(Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$round";
|
||||
}
|
||||
}
|
||||
}
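A small sketch of the $round support added above (illustrative only, not part of the diff; "price" and "roundedPrice" are hypothetical fields):

// Illustrative sketch: uses roundToPlace(..) inside a $project stage.
ProjectionOperation project = Aggregation.project()
        .and(ArithmeticOperators.valueOf("price").roundToPlace(2)).as("roundedPrice");
// renders roughly as: { $project: { roundedPrice: { $round: [ "$price", 2 ] } } }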
|
||||
|
||||
@@ -411,7 +411,7 @@ public class ComparisonOperators {
|
||||
public Cmp compareToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Cmp(append(value, Expand.KEEP_SOURCE));
|
||||
return new Cmp(append(value));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -488,7 +488,7 @@ public class ComparisonOperators {
|
||||
public Eq equalToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Eq(append(value, Expand.KEEP_SOURCE));
|
||||
return new Eq(append(value));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -873,7 +873,7 @@ public class ComparisonOperators {
|
||||
public Ne notEqualToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Ne(append(value, Expand.KEEP_SOURCE));
|
||||
return new Ne(append(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Base class for common tasks required by {@link SetOperation} and {@link AddFieldsOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOperation {
|
||||
|
||||
private final Map<Object, Object> valueMap;
|
||||
|
||||
private ExposedFields exposedFields = ExposedFields.empty();
|
||||
|
||||
protected DocumentEnhancingOperation(Map<Object, Object> source) {
|
||||
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
for (Object key : source.keySet()) {
|
||||
this.exposedFields = add(key);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext(
|
||||
exposedFields, context);
|
||||
|
||||
if (valueMap.size() == 1) {
|
||||
return context.getMappedObject(
|
||||
new Document(mongoOperator(), toSetEntry(valueMap.entrySet().iterator().next(), operationContext)));
|
||||
}
|
||||
|
||||
Document $set = new Document();
|
||||
valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll);
|
||||
return context.getMappedObject(new Document(mongoOperator(), $set));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the String representation of the native MongoDB operator.
|
||||
*/
|
||||
protected abstract String mongoOperator();
|
||||
|
||||
/**
|
||||
* @return the raw value map
|
||||
*/
|
||||
protected Map<Object, Object> getValueMap() {
|
||||
return this.valueMap;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return exposedFields;
|
||||
}
|
||||
|
||||
private ExposedFields add(Object field) {
|
||||
|
||||
if (field instanceof Field) {
|
||||
return exposedFields.and(new ExposedField((Field) field, true));
|
||||
}
|
||||
if (field instanceof String) {
|
||||
return exposedFields.and(new ExposedField(Fields.field((String) field), true));
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected %s to be a field/property.", field));
|
||||
}
|
||||
|
||||
private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {
|
||||
|
||||
String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
|
||||
: context.getReference((Field) entry.getKey()).getRaw();
|
||||
|
||||
Object value = computeValue(entry.getValue(), context);
|
||||
|
||||
return new Document(field, value);
|
||||
}
|
||||
|
||||
private static Object computeValue(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
return context.getReference((Field) value).toString();
|
||||
}
|
||||
|
||||
if (value instanceof ExpressionProjection) {
|
||||
return ((ExpressionProjection) value).toExpression(context);
|
||||
}
|
||||
|
||||
if (value instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) value).toDocument(context);
|
||||
}
|
||||
|
||||
if (value instanceof Collection) {
|
||||
return ((Collection<?>) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link AggregationExpression} based on a SpEL expression.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
static class ExpressionProjection {
|
||||
|
||||
private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer();
|
||||
|
||||
private final String expression;
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation.ExpressionProjectionOperationBuilder.ExpressionProjection} for the given
* SpEL expression and parameters.
|
||||
*
|
||||
* @param expression must not be {@literal null} or empty.
|
||||
* @param parameters must not be {@literal null}.
|
||||
*/
|
||||
ExpressionProjection(String expression, Object[] parameters) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
Assert.notNull(parameters, "Parameters must not be null!");
|
||||
|
||||
this.expression = expression;
|
||||
this.params = parameters.clone();
|
||||
}
|
||||
|
||||
Object toExpression(AggregationOperationContext context) {
|
||||
return TRANSFORMER.transform(expression, context, params);
|
||||
}
|
||||
}
|
||||
|
||||
}
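For context, a sketch of how a concrete subclass of the base class above is typically used (illustrative only, not part of the diff; "price" and "discountedPrice" are hypothetical fields):

// Illustrative sketch: SetOperation builds on the DocumentEnhancingOperation base shown above.
SetOperation set = SetOperation.builder()
        .set("discountedPrice").toValueOf(ArithmeticOperators.valueOf("price").multiplyBy(0.9));
// renders roughly as: { $set: { discountedPrice: { $multiply: [ "$price", 0.9 ] } } }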
|
||||
@@ -1,594 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $merge}-operation.
|
||||
* <p>
|
||||
* We recommend using the {@link MergeOperationBuilder builder} via {@link MergeOperation#builder()} instead of
|
||||
* creating instances of this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/merge/">MongoDB Documentation</a>
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MergeOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {
|
||||
|
||||
private final MergeOperationTarget into;
|
||||
private final UniqueMergeId on;
|
||||
private final @Nullable Let let;
|
||||
private final @Nullable WhenDocumentsMatch whenMatched;
|
||||
private final @Nullable WhenDocumentsDontMatch whenNotMatched;
|
||||
|
||||
/**
|
||||
* Create new instance of {@link MergeOperation}.
|
||||
*
|
||||
* @param into the target (collection and database)
|
||||
* @param on the unique identifier. Must not be {@literal null}; use {@link UniqueMergeId#id()} for the default.
|
||||
* @param let exposed variables for {@link WhenDocumentsMatch#updateWith(Aggregation)}. Can be {@literal null}.
|
||||
* @param whenMatched behavior if a result document matches an existing one in the target collection. Can be
|
||||
* {@literal null}.
|
||||
* @param whenNotMatched behavior if a result document does not match an existing one in the target collection. Can be
|
||||
* {@literal null}.
|
||||
*/
|
||||
public MergeOperation(MergeOperationTarget into, UniqueMergeId on, @Nullable Let let,
|
||||
@Nullable WhenDocumentsMatch whenMatched, @Nullable WhenDocumentsDontMatch whenNotMatched) {
|
||||
|
||||
Assert.notNull(into, "Into must not be null! Please provide a target collection.");
|
||||
Assert.notNull(on, "On must not be null! Use UniqueMergeId.id() instead.");
|
||||
|
||||
this.into = into;
|
||||
this.on = on;
|
||||
this.let = let;
|
||||
this.whenMatched = whenMatched;
|
||||
this.whenNotMatched = whenNotMatched;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simplified form to apply all default options for {@code $merge} (including writing to a collection in the same
|
||||
* database).
|
||||
*
|
||||
* @param collection the output collection within the same database.
|
||||
* @return new instance of {@link MergeOperation}.
|
||||
*/
|
||||
public static MergeOperation mergeInto(String collection) {
|
||||
return builder().intoCollection(collection).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Access the {@link MergeOperationBuilder builder API} to create a new instance of {@link MergeOperation}.
|
||||
*
|
||||
* @return new instance of {@link MergeOperationBuilder}.
|
||||
*/
|
||||
public static MergeOperationBuilder builder() {
|
||||
return new MergeOperationBuilder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Aggregation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (isJustCollection()) {
|
||||
return new Document("$merge", into.collection);
|
||||
}
|
||||
|
||||
Document $merge = new Document();
|
||||
$merge.putAll(into.toDocument(context));
|
||||
|
||||
if (!on.isJustIdField()) {
|
||||
$merge.putAll(on.toDocument(context));
|
||||
}
|
||||
|
||||
if (let != null) {
|
||||
$merge.append("let", let.toDocument(context).get("$let", Document.class).get("vars"));
|
||||
}
|
||||
|
||||
if (whenMatched != null) {
|
||||
$merge.putAll(whenMatched.toDocument(context));
|
||||
}
|
||||
|
||||
if (whenNotMatched != null) {
|
||||
$merge.putAll(whenNotMatched.toDocument(context));
|
||||
}
|
||||
|
||||
return new Document("$merge", $merge);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
if (let == null) {
|
||||
return ExposedFields.from();
|
||||
}
|
||||
|
||||
return ExposedFields.synthetic(Fields.fields(let.getVariableNames()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation#inheritsFields()
|
||||
*/
|
||||
@Override
|
||||
public boolean inheritsFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if nothing more than the collection is specified.
|
||||
*/
|
||||
private boolean isJustCollection() {
|
||||
return into.isTargetingSameDatabase() && on.isJustIdField() && let == null && whenMatched == null
|
||||
&& whenNotMatched == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object representing the unique id used during the merge operation to identify duplicates in the target
|
||||
* collection.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class UniqueMergeId {
|
||||
|
||||
private static final UniqueMergeId ID = new UniqueMergeId(Collections.emptyList());
|
||||
|
||||
private final Collection<String> uniqueIdentifier;
|
||||
|
||||
private UniqueMergeId(Collection<String> uniqueIdentifier) {
|
||||
this.uniqueIdentifier = uniqueIdentifier;
|
||||
}
|
||||
|
||||
public static UniqueMergeId ofIdFields(String... fields) {
|
||||
|
||||
Assert.noNullElements(fields, "Fields must not contain null values!");
|
||||
|
||||
if (ObjectUtils.isEmpty(fields)) {
|
||||
return id();
|
||||
}
|
||||
|
||||
return new UniqueMergeId(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge Documents by using the MongoDB {@literal _id} field.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static UniqueMergeId id() {
|
||||
return ID;
|
||||
}
|
||||
|
||||
boolean isJustIdField() {
|
||||
return this.equals(ID);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
List<String> mappedOn = uniqueIdentifier.stream().map(context::getReference).map(FieldReference::getRaw)
|
||||
.collect(Collectors.toList());
|
||||
return new Document("on", mappedOn.size() == 1 ? mappedOn.iterator().next() : mappedOn);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object representing the {@code into} field of a {@code $merge} aggregation stage. <br />
|
||||
* If not stated explicitly via {@link MergeOperationTarget#inDatabase(String)} the {@literal collection} is created
|
||||
* in the very same {@literal database}. In this case {@code into} is just a single String holding the collection
|
||||
* name. <br />
|
||||
*
|
||||
* <pre class="code">
|
||||
* into: "target-collection-name"
|
||||
* </pre>
|
||||
*
|
||||
* If the collection needs to be in a different database, {@code into} will be a {@link Document} like the following
|
||||
*
|
||||
* <pre class="code">
|
||||
* {
|
||||
* into: {}
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class MergeOperationTarget {
|
||||
|
||||
private final @Nullable String database;
|
||||
private final String collection;
|
||||
|
||||
private MergeOperationTarget(@Nullable String database, String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
this.database = database;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param collection The output collection results will be stored in. Must not be {@literal null}.
|
||||
* @return new instance of {@link MergeOperationTarget}.
|
||||
*/
|
||||
public static MergeOperationTarget collection(String collection) {
|
||||
return new MergeOperationTarget(null, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the target database if different from the source one.
|
||||
*
|
||||
* @param database must not be {@literal null}.
|
||||
* @return new instance of {@link MergeOperationTarget}.
|
||||
*/
|
||||
public MergeOperationTarget inDatabase(String database) {
|
||||
return new MergeOperationTarget(database, collection);
|
||||
}
|
||||
|
||||
boolean isTargetingSameDatabase() {
|
||||
return !StringUtils.hasText(database);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
return new Document("into",
|
||||
!StringUtils.hasText(database) ? collection : new Document("db", database).append("coll", collection));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object specifying how to deal with a result document that matches an existing document in the collection
|
||||
* based on the fields of the {@code on} property describing the unique identifier.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class WhenDocumentsMatch {
|
||||
|
||||
private final Object value;
|
||||
|
||||
private WhenDocumentsMatch(Object value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static WhenDocumentsMatch whenMatchedOf(String value) {
|
||||
return new WhenDocumentsMatch(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the existing document in the output collection with the matching results document.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch replaceDocument() {
|
||||
return whenMatchedOf("replace");
|
||||
}
|
||||
|
||||
/**
|
||||
* Keep the existing document in the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch keepExistingDocument() {
|
||||
return whenMatchedOf("keepExisting");
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge the matching documents. Please see the MongoDB reference documentation for details.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch mergeDocuments() {
|
||||
return whenMatchedOf("merge");
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop and fail the aggregation operation. Changes already applied to previously processed documents are not rolled back.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch failOnMatch() {
|
||||
return whenMatchedOf("fail");
|
||||
}
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to update the document in the collection. Please see the MongoDB reference
|
||||
* documentation for details.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch updateWith(Aggregation aggregation) {
|
||||
return new WhenDocumentsMatch(aggregation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use an aggregation pipeline to update the document in the collection. Please see the MongoDB reference
|
||||
* documentation for details.
|
||||
*
|
||||
* @param aggregationPipeline must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch updateWith(List<AggregationOperation> aggregationPipeline) {
|
||||
return new WhenDocumentsMatch(aggregationPipeline);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Aggregation) {
|
||||
return new Document("whenMatched", ((Aggregation) value).toPipeline(context));
|
||||
}
|
||||
|
||||
return new Document("whenMatched", value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object specifying how to deal with a result document that does not match an existing document in the collection
|
||||
* based on the fields of the {@code on} property describing the unique identifier.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class WhenDocumentsDontMatch {
|
||||
|
||||
private final String value;
|
||||
|
||||
private WhenDocumentsDontMatch(String value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method creating {@link WhenDocumentsDontMatch} from a {@code value} literal.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch whenNotMatchedOf(String value) {
|
||||
return new WhenDocumentsDontMatch(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert the document into the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch insertNewDocument() {
|
||||
return whenNotMatchedOf("insert");
|
||||
}
|
||||
|
||||
/**
|
||||
* Discard the document - do not insert the document into the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch discardDocument() {
|
||||
return whenNotMatchedOf("discard");
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop and fail the aggregation operation. Changes already applied to previously processed documents are not rolled back.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch failWhenNotMatch() {
|
||||
return whenNotMatchedOf("fail");
|
||||
}
|
||||
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("whenNotMatched", value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API to construct a {@link MergeOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class MergeOperationBuilder {
|
||||
|
||||
private String collection;
|
||||
private @Nullable String database;
|
||||
private UniqueMergeId id = UniqueMergeId.id();
|
||||
private @Nullable Let let;
|
||||
private @Nullable WhenDocumentsMatch whenMatched;
|
||||
private @Nullable WhenDocumentsDontMatch whenNotMatched;
|
||||
|
||||
public MergeOperationBuilder() {}
|
||||
|
||||
/**
|
||||
* Required output collection name to store results to.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor empty.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder intoCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
this.collection = collection;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally define a target database if different from the current one.
|
||||
*
|
||||
* @param database must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder inDatabase(String database) {
|
||||
|
||||
this.database = database;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the target to store results in.
|
||||
*
|
||||
* @param into must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder into(MergeOperationTarget into) {
|
||||
|
||||
this.database = into.database;
|
||||
this.collection = into.collection;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the target to store results in.
|
||||
*
|
||||
* @param target must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder target(MergeOperationTarget target) {
|
||||
return into(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a single field or multiple fields that act as a unique identifier for a document. The identifier
|
||||
* determines if a results document matches an already existing document in the output collection. <br />
|
||||
* The aggregation results documents must contain the field(s) specified via {@code on}, unless it's the {@code _id}
|
||||
* field.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder on(String... fields) {
|
||||
return id(UniqueMergeId.ofIdFields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the identifier that determines if a results document matches an already existing document in the output
|
||||
* collection.
|
||||
*
|
||||
* @param id must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder id(UniqueMergeId id) {
|
||||
|
||||
this.id = id;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
|
||||
* aggregation}.
|
||||
*
|
||||
* @param let the variable expressions
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder let(Let let) {
|
||||
|
||||
this.let = let;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
|
||||
* aggregation}.
|
||||
*
|
||||
* @param let the variable expressions
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder exposeVariablesOf(Let let) {
|
||||
return let(let);
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param whenMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenMatched(WhenDocumentsMatch whenMatched) {
|
||||
|
||||
this.whenMatched = whenMatched;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param whenMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsMatch(WhenDocumentsMatch whenMatched) {
|
||||
return whenMatched(whenMatched);
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link Aggregation action} to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsMatchApply(Aggregation aggregation) {
|
||||
return whenMatched(WhenDocumentsMatch.updateWith(aggregation));
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents do not already exist in the target collection.
|
||||
*
|
||||
* @param whenNotMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenNotMatched(WhenDocumentsDontMatch whenNotMatched) {
|
||||
|
||||
this.whenNotMatched = whenNotMatched;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents do not already exist in the target collection.
|
||||
*
|
||||
* @param whenNotMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsDontMatch(WhenDocumentsDontMatch whenNotMatched) {
|
||||
return whenNotMatched(whenNotMatched);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return new instance of {@link MergeOperation}.
|
||||
*/
|
||||
public MergeOperation build() {
|
||||
return new MergeOperation(new MergeOperationTarget(database, collection), id, let, whenMatched, whenNotMatched);
|
||||
}
|
||||
}
|
||||
}
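A builder-based usage sketch for the $merge stage above (illustrative only, not part of the diff; collection and field names are hypothetical):

// Illustrative sketch: builds a $merge stage and appends it to a pipeline.
MergeOperation merge = MergeOperation.builder()
        .intoCollection("monthlyTotals")
        .whenMatched(MergeOperation.WhenDocumentsMatch.replaceDocument())
        .whenNotMatched(MergeOperation.WhenDocumentsDontMatch.insertNewDocument())
        .build();

Aggregation aggregation = Aggregation.newAggregation(
        Aggregation.group("month").sum("amount").as("total"),
        merge);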
|
||||
@@ -1,243 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link RedactOperation} allows restricting the content of a {@link Document} based on information stored within
|
||||
* itself.
|
||||
*
|
||||
* <pre class="code">
|
||||
* RedactOperation.builder() //
|
||||
* .when(Criteria.where("level").is(5)) //
|
||||
* .thenPrune() //
|
||||
* .otherwiseDescend() //
|
||||
* .build();
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/redact/">https://docs.mongodb.com/manual/reference/operator/aggregation/redact/</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
public class RedactOperation implements AggregationOperation {
|
||||
|
||||
/**
|
||||
* Return fields at the current document level. Exclude embedded ones.
|
||||
*/
|
||||
public static final String DESCEND = "$$DESCEND";
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level.
|
||||
*/
|
||||
public static final String KEEP = "$$KEEP";
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level.
|
||||
*/
|
||||
public static final String PRUNE = "$$PRUNE";
|
||||
|
||||
private final AggregationExpression condition;
|
||||
|
||||
/**
|
||||
* Create new {@link RedactOperation}.
|
||||
*
|
||||
* @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
|
||||
* {@literal $$KEEP}. Must not be {@literal null}.
|
||||
*/
|
||||
public RedactOperation(AggregationExpression condition) {
|
||||
|
||||
Assert.notNull(condition, "Condition must not be null!");
|
||||
this.condition = condition;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$redact", condition.toDocument(context));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a new instance of {@link RedactOperationBuilder} to specify condition and outcome of the {@literal $redact}
|
||||
* operation.
|
||||
*
|
||||
* @return new instance of {@link RedactOperationBuilder}.
|
||||
*/
|
||||
public static RedactOperationBuilder builder() {
|
||||
return new RedactOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder to create new instance of {@link RedactOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RedactOperationBuilder {
|
||||
|
||||
private Object when;
|
||||
private Object then;
|
||||
private Object otherwise;
|
||||
|
||||
private RedactOperationBuilder() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(CriteriaDefinition criteria) {
|
||||
|
||||
this.when = criteria;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(AggregationExpression condition) {
|
||||
|
||||
this.when = condition;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(Document condition) {
|
||||
|
||||
this.when = condition;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return fields at the current document level and exclude embedded ones if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenDescend() {
|
||||
return then(DESCEND);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenKeep() {
|
||||
return then(KEEP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenPrune() {
|
||||
return then(PRUNE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
|
||||
* when the condition is met.
|
||||
*
|
||||
* @param then must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder then(Object then) {
|
||||
|
||||
this.then = then;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return fields at the current document level and exclude embedded ones if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwiseDescend() {
|
||||
return otherwise(DESCEND);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwiseKeep() {
|
||||
return otherwise(KEEP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwisePrune() {
|
||||
return otherwise(PRUNE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
|
||||
* when the condition is not met.
|
||||
*
|
||||
* @param otherwise must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwise(Object otherwise) {
|
||||
this.otherwise = otherwise;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return new instance of {@link RedactOperation}.
|
||||
*/
|
||||
public RedactOperation build() {
|
||||
return new RedactOperation(when().then(then).otherwise(otherwise));
|
||||
}
|
||||
|
||||
private ThenBuilder when() {
|
||||
|
||||
if (when instanceof CriteriaDefinition) {
|
||||
return ConditionalOperators.Cond.when((CriteriaDefinition) when);
|
||||
}
|
||||
if (when instanceof AggregationExpression) {
|
||||
return ConditionalOperators.Cond.when((AggregationExpression) when);
|
||||
}
|
||||
if (when instanceof Document) {
|
||||
return ConditionalOperators.Cond.when((Document) when);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format(
|
||||
"Invalid Condition. Expected CriteriaDefinition, AggregationExpression or Document but was %s.", when));
|
||||
}
|
||||
}
|
||||
}
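The builder from the class Javadoc above, shown once as a complete pipeline stage (illustrative only, not part of the diff; "level" is a hypothetical field):

// Illustrative sketch: mirrors the builder example from the RedactOperation Javadoc above.
RedactOperation redact = RedactOperation.builder()
        .when(Criteria.where("level").is(5))
        .thenPrune()
        .otherwiseDescend()
        .build();

Aggregation aggregation = Aggregation.newAggregation(redact);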
|
||||
@@ -1,62 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
/**
|
||||
* A {@link TypeBasedAggregationOperationContext} with less restrictive field reference handling, suppressing
|
||||
* {@link InvalidPersistentPropertyPath} exceptions when resolving mapped field names.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class RelaxedTypeBasedAggregationOperationContext extends TypeBasedAggregationOperationContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link TypeBasedAggregationOperationContext} for the given type, {@link MappingContext} and
|
||||
* {@link QueryMapper}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
public RelaxedTypeBasedAggregationOperationContext(Class<?> type,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext, QueryMapper mapper) {
|
||||
super(type, mappingContext, mapper);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext#getReferenceFor(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
*/
|
||||
@Override
|
||||
protected FieldReference getReferenceFor(Field field) {
|
||||
|
||||
try {
|
||||
return super.getReferenceFor(field);
|
||||
} catch (InvalidPersistentPropertyPath e) {
|
||||
return new DirectFieldReference(new ExposedField(field, true));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -33,8 +33,7 @@ import org.springframework.util.Assert;
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/replaceRoot/">MongoDB Aggregation
|
||||
* Framework: $replaceRoot</a>
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/replaceRoot/">MongoDB Aggregation Framework: $replaceRoot</a>
|
||||
*/
|
||||
public class ReplaceRootOperation implements FieldsExposingAggregationOperation {
|
||||
|
||||
@@ -83,7 +82,7 @@ public class ReplaceRootOperation implements FieldsExposingAggregationOperation
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$replaceRoot", new Document("newRoot", getReplacement().toDocumentExpression(context)));
|
||||
return new Document("$replaceRoot", new Document("newRoot", replacement.toDocumentExpression(context)));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -94,16 +93,6 @@ public class ReplaceRootOperation implements FieldsExposingAggregationOperation
|
||||
return ExposedFields.from();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link Replacement}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected Replacement getReplacement() {
|
||||
return replacement;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ReplaceRootOperation}.
|
||||
*
|
||||
|
||||
@@ -1,95 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.aggregation;

import java.util.Collection;
import java.util.stream.Collectors;

import org.bson.Document;
import org.springframework.util.Assert;

/**
 * Encapsulates the aggregation framework {@code $replaceRoot}-operation. <br />
 * The operation replaces all existing fields including the {@code id} field with {@code $replaceWith}. This way it is
 * possible to promote an embedded document to the top-level or specify a new document.
 *
 * @author Christoph Strobl
 * @since 3.0
 * @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/replaceWith/">MongoDB Aggregation
 *      Framework: $replaceWith</a>
 */
public class ReplaceWithOperation extends ReplaceRootOperation {

	/**
	 * Creates new instance of {@link ReplaceWithOperation}.
	 *
	 * @param replacement must not be {@literal null}.
	 */
	public ReplaceWithOperation(Replacement replacement) {
		super(replacement);
	}

	/**
	 * Creates new instance of {@link ReplaceWithOperation}.
	 *
	 * @param value must not be {@literal null}.
	 * @return new instance of {@link ReplaceWithOperation}.
	 */
	public static ReplaceWithOperation replaceWithValue(Object value) {
		return new ReplaceWithOperation((ctx) -> value);
	}

	/**
	 * Creates new instance of {@link ReplaceWithOperation} treating a given {@link String} {@literal value} as a
	 * {@link Field field reference}.
	 *
	 * @param value must not be {@literal null}.
	 * @return
	 */
	public static ReplaceWithOperation replaceWithValueOf(Object value) {

		Assert.notNull(value, "Value must not be null!");
		return new ReplaceWithOperation((ctx) -> {

			Object target = value instanceof String ? Fields.field((String) value) : value;
			return computeValue(target, ctx);
		});
	}

	private static Object computeValue(Object value, AggregationOperationContext context) {

		if (value instanceof Field) {
			return context.getReference((Field) value).toString();
		}
		if (value instanceof AggregationExpression) {
			return ((AggregationExpression) value).toDocument(context);
		}
		if (value instanceof Collection) {
			return ((Collection) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
		}

		return value;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
	 */
	@Override
	public Document toDocument(AggregationOperationContext context) {
		return context.getMappedObject(new Document("$replaceWith", getReplacement().toDocumentExpression(context)));
	}
}
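For reference, a short usage sketch of the $replaceWith support shown above (it only exists on the 3.0 side of this compare). replaceWithValueOf treats a plain String as a field reference, so the stage promotes an embedded document to the top level; the "customer" field name is illustrative.

// Sketch: assumes documents with an embedded "customer" sub-document.
ReplaceWithOperation replaceWith = ReplaceWithOperation.replaceWithValueOf("customer");

Aggregation aggregation = Aggregation.newAggregation(
		Aggregation.match(Criteria.where("status").is("open")),
		replaceWith);

// The stage renders to: { "$replaceWith" : "$customer" }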
@@ -1,193 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.SetOperation.FieldAppender.ValueAppender;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input
|
||||
* documents and newly added fields.
|
||||
*
|
||||
* <pre class="code">
|
||||
* SetOperation.set("totalHomework").toValue("A+").and().set("totalQuiz").toValue("B-")
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/set/">MongoDB Aggregation Framework:
|
||||
* $set</a>
|
||||
*/
|
||||
public class SetOperation extends DocumentEnhancingOperation {
|
||||
|
||||
/**
|
||||
* Create new instance of {@link SetOperation} adding map keys as exposed fields.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private SetOperation(Map<Object, Object> source) {
|
||||
super(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance of {@link SetOperation}
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param value can be {@literal null}.
|
||||
*/
|
||||
public SetOperation(Object field, @Nullable Object value) {
|
||||
this(Collections.singletonMap(field, value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link SetOperation} via {@link FieldAppender}.
|
||||
*
|
||||
* @return new instance of {@link FieldAppender}.
|
||||
*/
|
||||
public static FieldAppender builder() {
|
||||
return new FieldAppender();
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate another field to set.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link ValueAppender}.
|
||||
*/
|
||||
public static ValueAppender set(String field) {
|
||||
return new FieldAppender().set(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the value for a specific field to the operation.
|
||||
*
|
||||
* @param field the target field to set.
|
||||
* @param value the value to assign.
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
public SetOperation set(Object field, Object value) {
|
||||
|
||||
LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
|
||||
target.put(field, value);
|
||||
|
||||
return new SetOperation(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate additional fields to set.
|
||||
*
|
||||
* @return new instance of {@link FieldAppender}.
|
||||
*/
|
||||
public FieldAppender and() {
|
||||
return new FieldAppender(getValueMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String mongoOperator() {
|
||||
return "$set";
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class FieldAppender {
|
||||
|
||||
private final Map<Object, Object> valueMap;
|
||||
|
||||
private FieldAppender() {
|
||||
this.valueMap = new LinkedHashMap<>();
|
||||
}
|
||||
|
||||
private FieldAppender(Map<Object, Object> source) {
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the field to set.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link ValueAppender}.
|
||||
*/
|
||||
public ValueAppender set(String field) {
|
||||
|
||||
return new ValueAppender() {
|
||||
|
||||
@Override
|
||||
public SetOperation toValue(Object value) {
|
||||
|
||||
valueMap.put(field, value);
|
||||
return FieldAppender.this.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SetOperation toValueOf(Object value) {
|
||||
|
||||
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
|
||||
return FieldAppender.this.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SetOperation withValueOfExpression(String operation, Object... values) {
|
||||
|
||||
valueMap.put(field, new ExpressionProjection(operation, values));
|
||||
return FieldAppender.this.build();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private SetOperation build() {
|
||||
return new SetOperation(valueMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface ValueAppender {
|
||||
|
||||
/**
|
||||
* Define the value to assign as is.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
SetOperation toValue(@Nullable Object value);
|
||||
|
||||
/**
|
||||
* Define the value to assign. Plain {@link String} values are treated as {@link Field field references}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
SetOperation toValueOf(Object value);
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code $add}.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
SetOperation withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
}
|
||||
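For reference, the builder shown in the SetOperation javadoc above can be used inside a pipeline like this; the field names and grades are illustrative.

// Sketch following the javadoc sample above.
SetOperation set = SetOperation.set("totalHomework").toValue("A+")
		.and().set("totalQuiz").toValue("B-");

Aggregation aggregation = Aggregation.newAggregation(set);

// Renders to roughly: { "$set" : { "totalHomework" : "A+", "totalQuiz" : "B-" } }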
@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.List;

import org.bson.Document;

import org.springframework.data.mapping.PersistentPropertyPath;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
@@ -127,16 +128,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
		return Fields.fields(fields.toArray(new String[0]));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#continueOnMissingFieldReference()
	 */
	@Override
	public AggregationOperationContext continueOnMissingFieldReference() {
		return new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, mapper);
	}

	protected FieldReference getReferenceFor(Field field) {
	private FieldReference getReferenceFor(Field field) {

		PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
				.getPersistentPropertyPath(field.getTarget(), type);
@@ -1,144 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* Removes fields from documents.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/unset/">MongoDB Aggregation Framework:
|
||||
* $unset</a>
|
||||
*/
|
||||
public class UnsetOperation implements InheritsFieldsAggregationOperation {
|
||||
|
||||
private final Collection<Object> fields;
|
||||
|
||||
/**
|
||||
* Create new instance of {@link UnsetOperation}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
*/
|
||||
public UnsetOperation(Collection<Object> fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
Assert.noNullElements(fields, "Fields must not contain null values.");
|
||||
|
||||
this.fields = fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance of {@link UnsetOperation}.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return new instance of {@link UnsetOperation}.
|
||||
*/
|
||||
public static UnsetOperation unset(String... fields) {
|
||||
return new UnsetOperation(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Also unset the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return new instance of {@link UnsetOperation}.
|
||||
*/
|
||||
public UnsetOperation and(String... fields) {
|
||||
|
||||
List<Object> target = new ArrayList<>(this.fields);
|
||||
CollectionUtils.mergeArrayIntoCollection(fields, target);
|
||||
return new UnsetOperation(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Also unset the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return new instance of {@link UnsetOperation}.
|
||||
*/
|
||||
public UnsetOperation and(Field... fields) {
|
||||
|
||||
List<Object> target = new ArrayList<>(this.fields);
|
||||
CollectionUtils.mergeArrayIntoCollection(fields, target);
|
||||
return new UnsetOperation(target);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return ExposedFields.from();
|
||||
}
|
||||
|
||||
Collection<String> removedFieldNames() {
|
||||
|
||||
List<String> fieldNames = new ArrayList<>(fields.size());
|
||||
for (Object it : fields) {
|
||||
if (it instanceof Field) {
|
||||
fieldNames.add(((Field) it).getName());
|
||||
} else {
|
||||
fieldNames.add(it.toString());
|
||||
}
|
||||
}
|
||||
return fieldNames;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (fields.size() == 1) {
|
||||
return new Document("$unset", computeFieldName(fields.iterator().next(), context));
|
||||
}
|
||||
|
||||
return new Document("$unset",
|
||||
fields.stream().map(it -> computeFieldName(it, context)).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
private Object computeFieldName(Object field, AggregationOperationContext context) {
|
||||
|
||||
if (field instanceof Field) {
|
||||
return context.getReference((Field) field).getRaw();
|
||||
}
|
||||
|
||||
if (field instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) field).toDocument(context);
|
||||
}
|
||||
|
||||
if (field instanceof String) {
|
||||
return context.getReference((String) field).getRaw();
|
||||
}
|
||||
|
||||
return field;
|
||||
}
|
||||
|
||||
}
|
||||
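For reference, a usage sketch of the $unset stage shown above; "isbn" and "copies.warehouse" are illustrative field names.

// Sketch: removes the listed fields from every document passing through the stage.
UnsetOperation unset = UnsetOperation.unset("isbn").and("copies.warehouse");

Aggregation aggregation = Aggregation.newAggregation(unset);

// Renders to roughly: { "$unset" : [ "isbn", "copies.warehouse" ] }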
@@ -296,7 +296,7 @@ public class VariableOperators {
			return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context);
		}

		String[] getVariableNames() {
		private String[] getVariableNames() {

			String[] varNames = new String[this.vars.size()];
			for (int i = 0; i < this.vars.size(); i++) {
@@ -26,7 +26,7 @@ import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.ConverterBuilder;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.model.EntityInstantiators;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter;
|
||||
@@ -107,8 +107,9 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, Date.class)) {
|
||||
|
||||
conversionService.addConverter(ConverterBuilder
|
||||
.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp())).getReadingConverter());
|
||||
conversionService.addConverter(
|
||||
ConverterBuilder.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp()))
|
||||
.getReadingConverter());
|
||||
}
|
||||
|
||||
conversionService
|
||||
|
||||
@@ -44,7 +44,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
@@ -70,16 +70,16 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}.
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) {
|
||||
public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
@@ -116,7 +116,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName() : mongoDbFactory.getMongoDatabase().getName(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName() : mongoDbFactory.getDb().getName(),
|
||||
dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -156,7 +156,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Bulk fetching DBRefs {} from {}.{}.", ids,
|
||||
StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName()
|
||||
: mongoDbFactory.getMongoDatabase().getName(),
|
||||
: mongoDbFactory.getDb().getName(),
|
||||
databaseSource.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -497,7 +497,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
protected MongoCollection<Document> getCollection(DBRef dbref) {
|
||||
|
||||
return (StringUtils.hasText(dbref.getDatabaseName()) ? mongoDbFactory.getMongoDatabase(dbref.getDatabaseName())
|
||||
: mongoDbFactory.getMongoDatabase()).getCollection(dbref.getCollectionName(), Document.class);
|
||||
return (StringUtils.hasText(dbref.getDatabaseName()) ? mongoDbFactory.getDb(dbref.getDatabaseName())
|
||||
: mongoDbFactory.getDb()).getCollection(dbref.getCollectionName(), Document.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,17 +43,16 @@ import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.EntityInstantiator;
|
||||
import org.springframework.data.mapping.model.ParameterValueProvider;
|
||||
import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider;
|
||||
import org.springframework.data.mapping.model.PropertyValueProvider;
|
||||
@@ -61,10 +60,9 @@ import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
@@ -92,10 +90,8 @@ import com.mongodb.DBRef;
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
* @author Mark Paluch
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Heesu Jung
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware, ValueResolver {
|
||||
|
||||
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s";
|
||||
private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!";
|
||||
@@ -113,7 +109,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private SpELContext spELContext;
|
||||
private @Nullable EntityCallbacks entityCallbacks;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
@@ -136,19 +131,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE);
|
||||
this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext,
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, MappingMongoConverter.this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDatabaseFactory} and {@link MappingContext}.
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
*
|
||||
* @deprecated use the constructor taking a {@link DbRefResolver} instead.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MappingMongoConverter(MongoDatabaseFactory mongoDbFactory,
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
|
||||
setCodecRegistryProvider(mongoDbFactory);
|
||||
@@ -216,26 +210,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.spELContext = new SpELContext(this.spELContext, applicationContext);
|
||||
|
||||
if (entityCallbacks == null) {
|
||||
setEntityCallbacks(EntityCallbacks.create(applicationContext));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}.
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if the given instance is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public void setEntityCallbacks(EntityCallbacks entityCallbacks) {
|
||||
|
||||
Assert.notNull(entityCallbacks, "EntityCallbacks must not be null!");
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
/*
|
||||
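The hunk above drops the explicit EntityCallbacks hook that the 3.0 side adds to MappingMongoConverter. When the converter is created outside an ApplicationContext, the 3.0 API would be wired roughly like this; the dbRefResolver and mappingContext instances are assumed.

// Sketch against the 3.0 API shown above.
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
converter.setEntityCallbacks(EntityCallbacks.create()); // or EntityCallbacks.create(applicationContext)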
@@ -252,9 +226,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
private <S extends Object> S read(TypeInformation<S> type, Bson bson, ObjectPath path) {
|
||||
private <S extends Object> S read(TypeInformation<S> type, @Nullable Bson bson, ObjectPath path) {
|
||||
|
||||
Assert.notNull(bson, "Bson must not be null!");
|
||||
if (null == bson) {
|
||||
return null;
|
||||
}
|
||||
|
||||
TypeInformation<? extends S> typeToUse = typeMapper.readType(bson, type);
|
||||
Class<? extends S> rawType = typeToUse.getType();
|
||||
@@ -455,7 +431,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
SpELExpressionEvaluator evaluator) {
|
||||
|
||||
return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), currentPath, evaluator,
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
MappingMongoConverter.this);
|
||||
}
|
||||
|
||||
private void readAssociation(Association<MongoPersistentProperty> association, PersistentPropertyAccessor<?> accessor,
|
||||
@@ -1067,8 +1043,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("No id property found on class " + entity.getType());
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.ValueResolver#getValueInternal(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, com.mongodb.Document, org.springframework.data.mapping.model.SpELExpressionEvaluator, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
|
||||
ObjectPath path) {
|
||||
return new MongoDbPropertyValueProvider(bson, evaluator, path).getPropertyValue(prop);
|
||||
}
|
||||
@@ -1281,16 +1261,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
if (conversions.isSimpleType(obj.getClass())) {
|
||||
|
||||
Class<?> conversionTargetType;
|
||||
|
||||
if (typeInformation != null && conversions.isSimpleType(typeInformation.getType())) {
|
||||
conversionTargetType = typeInformation.getType();
|
||||
} else {
|
||||
conversionTargetType = Object.class;
|
||||
}
|
||||
|
||||
return getPotentiallyConvertedSimpleWrite(obj, conversionTargetType);
|
||||
// Doesn't need conversion
|
||||
return getPotentiallyConvertedSimpleWrite(obj,
|
||||
typeInformation != null ? typeInformation.getType() : Object.class);
|
||||
}
|
||||
|
||||
if (obj instanceof List) {
|
||||
@@ -1518,7 +1491,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), path, evaluator,
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
MappingMongoConverter.this);
|
||||
|
||||
DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null;
|
||||
return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler);
|
||||
@@ -1600,7 +1573,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@Nullable
|
||||
private <T> T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation<?> type, ObjectPath path,
|
||||
@Nullable Class<?> rawType) {
|
||||
final Class<?> rawType) {
|
||||
|
||||
List<T> result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType);
|
||||
return CollectionUtils.isEmpty(result) ? null : result.iterator().next();
|
||||
@@ -1623,7 +1596,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> List<T> bulkReadAndConvertDBRefs(List<DBRef> dbrefs, TypeInformation<?> type, ObjectPath path,
|
||||
@Nullable Class<?> rawType) {
|
||||
final Class<?> rawType) {
|
||||
|
||||
if (CollectionUtils.isEmpty(dbrefs)) {
|
||||
return Collections.emptyList();
|
||||
@@ -1634,27 +1607,23 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
: bulkReadRefs(dbrefs);
|
||||
String collectionName = dbrefs.iterator().next().getCollectionName();
|
||||
|
||||
List<T> targetList = new ArrayList<>(dbrefs.size());
|
||||
List<T> targeList = new ArrayList<>(dbrefs.size());
|
||||
|
||||
for (Document document : referencedRawDocuments) {
|
||||
|
||||
T target = null;
|
||||
if (document != null) {
|
||||
|
||||
maybeEmitEvent(
|
||||
new AfterLoadEvent<>(document, (Class<T>) (rawType != null ? rawType : Object.class), collectionName));
|
||||
target = (T) read(type, document, path);
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, (Class<T>) rawType, collectionName));
|
||||
}
|
||||
|
||||
final T target = (T) read(type, document, path);
|
||||
targeList.add(target);
|
||||
|
||||
if (target != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName));
|
||||
target = maybeCallAfterConvert(target, document, collectionName);
|
||||
}
|
||||
|
||||
targetList.add(target);
|
||||
}
|
||||
|
||||
return targetList;
|
||||
return targeList;
|
||||
}
|
||||
|
||||
private void maybeEmitEvent(MongoMappingEvent<?> event) {
|
||||
@@ -1668,15 +1637,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return this.applicationContext != null;
|
||||
}
|
||||
|
||||
protected <T> T maybeCallAfterConvert(T object, Document document, String collection) {
|
||||
|
||||
if (null != entityCallbacks) {
|
||||
return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection);
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the fetch operation for the given {@link DBRef}.
|
||||
*
|
||||
@@ -1711,12 +1671,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDatabaseFactory} when loading {@link DBRef}.
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDbFactory} when loading {@link DBRef}.
|
||||
*
|
||||
* @return new instance of {@link MappingMongoConverter}. Never {@literal null}.
|
||||
* @since 2.1.6
|
||||
*/
|
||||
public MappingMongoConverter with(MongoDatabaseFactory dbFactory) {
|
||||
public MappingMongoConverter with(MongoDbFactory dbFactory) {
|
||||
|
||||
MappingMongoConverter target = new MappingMongoConverter(new DefaultDbRefResolver(dbFactory), mappingContext);
|
||||
target.applicationContext = applicationContext;
|
||||
|
||||
@@ -110,7 +110,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(ObjectId id) {
|
||||
return id.toString();
|
||||
return id == null ? null : id.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,7 +136,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public BigInteger convert(ObjectId source) {
|
||||
return new BigInteger(source.toString(), 16);
|
||||
return source == null ? null : new BigInteger(source.toString(), 16);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -149,7 +149,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public ObjectId convert(BigInteger source) {
|
||||
return new ObjectId(source.toString(16));
|
||||
return source == null ? null : new ObjectId(source.toString(16));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,7 +157,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(BigDecimal source) {
|
||||
return source.toString();
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -168,7 +168,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public Decimal128 convert(BigDecimal source) {
|
||||
return new Decimal128(source);
|
||||
return source == null ? null : new Decimal128(source);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,7 +195,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(BigInteger source) {
|
||||
return source.toString();
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -211,7 +211,7 @@ abstract class MongoConverters {
|
||||
INSTANCE;
|
||||
|
||||
public String convert(URL source) {
|
||||
return source.toString();
|
||||
return source == null ? null : source.toString();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,7 +224,7 @@ abstract class MongoConverters {
|
||||
public URL convert(String source) {
|
||||
|
||||
try {
|
||||
return new URL(source);
|
||||
return source == null ? null : new URL(source);
|
||||
} catch (MalformedURLException e) {
|
||||
throw new ConversionFailedException(SOURCE, TARGET, source, e);
|
||||
}
|
||||
@@ -238,6 +238,11 @@ abstract class MongoConverters {
|
||||
|
||||
@Override
|
||||
public String convert(Document source) {
|
||||
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return source.toJson();
|
||||
}
|
||||
}
|
||||
@@ -253,7 +258,7 @@ abstract class MongoConverters {
|
||||
|
||||
@Override
|
||||
public String convert(Term source) {
|
||||
return source.getFormatted();
|
||||
return source == null ? null : source.getFormatted();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,7 +273,7 @@ abstract class MongoConverters {
|
||||
@Override
|
||||
public NamedMongoScript convert(Document source) {
|
||||
|
||||
if(source.isEmpty()) {
|
||||
if (source == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -290,6 +295,10 @@ abstract class MongoConverters {
|
||||
@Override
|
||||
public Document convert(NamedMongoScript source) {
|
||||
|
||||
if (source == null) {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
Document document = new Document();
|
||||
|
||||
document.put("_id", source.getName());
|
||||
@@ -316,7 +325,7 @@ abstract class MongoConverters {
|
||||
*/
|
||||
@Override
|
||||
public String convert(Currency source) {
|
||||
return source.getCurrencyCode();
|
||||
return source == null ? null : source.getCurrencyCode();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -452,7 +461,7 @@ abstract class MongoConverters {
|
||||
|
||||
@Override
|
||||
public AtomicLong convert(Long source) {
|
||||
return new AtomicLong(source);
|
||||
return source != null ? new AtomicLong(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -468,7 +477,7 @@ abstract class MongoConverters {
|
||||
|
||||
@Override
|
||||
public AtomicInteger convert(Integer source) {
|
||||
return new AtomicInteger(source);
|
||||
return source != null ? new AtomicInteger(source) : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -483,6 +492,7 @@ abstract class MongoConverters {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public byte[] convert(Binary source) {
|
||||
return source.getData();
|
||||
@@ -500,6 +510,7 @@ abstract class MongoConverters {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Instant convert(BsonTimestamp source) {
|
||||
return Instant.ofEpochSecond(source.getTime(), 0);
|
||||
|
||||
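The converter hunks above add source == null ? null : ... guards on the 2.2.4 side. A custom converter following the same defensive pattern could look like the sketch below; the converter name is illustrative.

// Illustrative null-tolerant reading converter mirroring the guards added above.
@ReadingConverter
enum ObjectIdToHexStringConverter implements Converter<ObjectId, String> {

	INSTANCE;

	@Nullable
	@Override
	public String convert(ObjectId source) {
		return source == null ? null : source.toHexString();
	}
}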
@@ -15,40 +15,26 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.LocalTime;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.core.convert.converter.GenericConverter;
|
||||
import org.springframework.data.convert.JodaTimeConverters;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Value object to capture custom conversion. {@link MongoCustomConversions} also act as factory for
|
||||
* {@link org.springframework.data.mapping.model.SimpleTypeHolder}
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @see org.springframework.data.convert.CustomConversions
|
||||
* @see org.springframework.data.mapping.model.SimpleTypeHolder
|
||||
@@ -85,33 +71,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
* @param converters must not be {@literal null}.
|
||||
*/
|
||||
public MongoCustomConversions(List<?> converters) {
|
||||
this(MongoConverterConfigurationAdapter.from(converters));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoCustomConversions} given {@link MongoConverterConfigurationAdapter}.
|
||||
*
|
||||
* @param conversionConfiguration must not be {@literal null}.
|
||||
* @since 2.3
|
||||
*/
|
||||
protected MongoCustomConversions(MongoConverterConfigurationAdapter conversionConfiguration) {
|
||||
super(conversionConfiguration.createConverterConfiguration());
|
||||
}
|
||||
|
||||
/**
|
||||
* Functional style {@link org.springframework.data.convert.CustomConversions} creation giving users a convenient way
|
||||
* of configuring store specific capabilities by providing deferred hooks to what will be configured when creating the
|
||||
* {@link org.springframework.data.convert.CustomConversions#CustomConversions(ConverterConfiguration) instance}.
|
||||
*
|
||||
* @param configurer must not be {@literal null}.
|
||||
* @since 2.3
|
||||
*/
|
||||
public static MongoCustomConversions create(Consumer<MongoConverterConfigurationAdapter> configurer) {
|
||||
|
||||
MongoConverterConfigurationAdapter adapter = new MongoConverterConfigurationAdapter();
|
||||
configurer.accept(adapter);
|
||||
|
||||
return new MongoCustomConversions(adapter);
|
||||
super(STORE_CONVERSIONS, converters);
|
||||
}
|
||||
|
||||
@WritingConverter
|
||||
@@ -139,181 +99,4 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
return source != null ? source.toString() : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoConverterConfigurationAdapter} encapsulates creation of
|
||||
* {@link org.springframework.data.convert.CustomConversions.ConverterConfiguration} with MongoDB specifics.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class MongoConverterConfigurationAdapter {
|
||||
|
||||
/**
|
||||
* List of {@literal java.time} types having different representation when rendered via the native
|
||||
* {@link org.bson.codecs.Codec} than the Spring Data {@link Converter}.
|
||||
*/
|
||||
private static final Set<Class<?>> JAVA_DRIVER_TIME_SIMPLE_TYPES = new HashSet<>(
|
||||
Arrays.asList(LocalDate.class, LocalTime.class, LocalDateTime.class));
|
||||
|
||||
private boolean useNativeDriverJavaTimeCodecs = false;
|
||||
private final List<Object> customConverters = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Create a {@link MongoConverterConfigurationAdapter} using the provided {@code converters} and our own codecs for
|
||||
* JSR-310 types.
|
||||
*
|
||||
* @param converters must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static MongoConverterConfigurationAdapter from(List<?> converters) {
|
||||
|
||||
Assert.notNull(converters, "Converters must not be null");
|
||||
|
||||
MongoConverterConfigurationAdapter converterConfigurationAdapter = new MongoConverterConfigurationAdapter();
|
||||
converterConfigurationAdapter.useSpringDataJavaTimeCodecs();
|
||||
converterConfigurationAdapter.registerConverters(converters);
|
||||
|
||||
return converterConfigurationAdapter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for
|
||||
* {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
|
||||
* and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
|
||||
*
|
||||
* @param useNativeDriverJavaTimeCodecs
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs(boolean useNativeDriverJavaTimeCodecs) {
|
||||
|
||||
this.useNativeDriverJavaTimeCodecs = useNativeDriverJavaTimeCodecs;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for
|
||||
* {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
|
||||
* and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
|
||||
*
|
||||
* @return this.
|
||||
* @see #useNativeDriverJavaTimeCodecs(boolean)
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs() {
|
||||
return useNativeDriverJavaTimeCodecs(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use SpringData {@link Converter Jsr310 converters} for
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalDateToDateConverter LocalDate},
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalTimeToDateConverter LocalTime} and
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalDateTimeToDateConverter LocalDateTime} using the
|
||||
* {@link ZoneId#systemDefault()}.
|
||||
*
|
||||
* @return this.
|
||||
* @see #useNativeDriverJavaTimeCodecs(boolean)
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useSpringDataJavaTimeCodecs() {
|
||||
return useNativeDriverJavaTimeCodecs(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom {@link Converter} implementation.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverter(Converter<?, ?> converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
customConverters.add(converter);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom {@link ConverterFactory} implementation.
|
||||
*
|
||||
* @param converterFactory must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverterFactory(ConverterFactory<?, ?> converterFactory) {
|
||||
|
||||
Assert.notNull(converterFactory, "ConverterFactory must not be null!");
|
||||
customConverters.add(converterFactory);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add {@link Converter converters}, {@link ConverterFactory factories}, ...
|
||||
*
|
||||
* @param converters must not be {@literal null} nor contain {@literal null} values.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverters(Collection<?> converters) {
|
||||
|
||||
Assert.notNull(converters, "Converters must not be null");
|
||||
Assert.noNullElements(converters, "Converters must not be null nor contain null values!");
|
||||
|
||||
customConverters.addAll(converters);
|
||||
return this;
|
||||
}
|
||||
|
||||
ConverterConfiguration createConverterConfiguration() {
|
||||
|
||||
if (!useNativeDriverJavaTimeCodecs) {
|
||||
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters);
|
||||
}
|
||||
|
||||
/*
|
||||
* We need to have those converters using UTC as the default ones would go on with the systemDefault.
|
||||
*/
|
||||
List<Object> converters = new ArrayList<>(STORE_CONVERTERS.size() + 3);
|
||||
converters.add(DateToUtcLocalDateConverter.INSTANCE);
|
||||
converters.add(DateToUtcLocalTimeConverter.INSTANCE);
|
||||
converters.add(DateToUtcLocalDateTimeConverter.INSTANCE);
|
||||
converters.addAll(STORE_CONVERTERS);
|
||||
|
||||
StoreConversions storeConversions = StoreConversions
|
||||
.of(new SimpleTypeHolder(JAVA_DRIVER_TIME_SIMPLE_TYPES, MongoSimpleTypes.HOLDER), converters);
|
||||
|
||||
return new ConverterConfiguration(storeConversions, this.customConverters, convertiblePair -> {
|
||||
|
||||
// Avoid default registrations
|
||||
|
||||
if (JAVA_DRIVER_TIME_SIMPLE_TYPES.contains(convertiblePair.getSourceType())
|
||||
&& Date.class.isAssignableFrom(convertiblePair.getTargetType())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalDateTime convert(Date source) {
|
||||
return LocalDateTime.ofInstant(Instant.ofEpochMilli(source.getTime()), ZoneId.of("UTC"));
|
||||
}
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalTimeConverter implements Converter<Date, LocalTime> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalTime convert(Date source) {
|
||||
return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalTime();
|
||||
}
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalDateConverter implements Converter<Date, LocalDate> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalDate convert(Date source) {
|
||||
return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalDate();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
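The MongoConverterConfigurationAdapter shown above is consumed through the functional MongoCustomConversions.create(...) factory on the 3.0 side; a usage sketch, where the registered converter is hypothetical.

// Sketch against the 3.0 API shown above; MyColorToStringConverter is a hypothetical Converter.
MongoCustomConversions conversions = MongoCustomConversions.create(adapter -> adapter
		.useNativeDriverJavaTimeCodecs() // driver codecs handle LocalDate/LocalTime/LocalDateTime in UTC
		.registerConverter(new MyColorToStringConverter()));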
@@ -24,6 +24,7 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -174,7 +175,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
Document mappedSort = new Document();
|
||||
for (Map.Entry<String, Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
|
||||
for(Map.Entry<String,Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
|
||||
|
||||
Field field = createPropertyField(entity, entry.getKey(), mappingContext);
|
||||
mappedSort.put(field.getMappedKey(), entry.getValue());
|
||||
@@ -419,7 +420,7 @@ public class QueryMapper {
|
||||
return false;
|
||||
}
|
||||
|
||||
Class<?> type = value.getClass();
|
||||
Class<? extends Object> type = value.getClass();
|
||||
MongoPersistentProperty property = documentField.getProperty();
|
||||
|
||||
if (property.getActualType().isAssignableFrom(type)) {
|
||||
@@ -443,7 +444,7 @@ public class QueryMapper {
|
||||
protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof Example) {
|
||||
return exampleMapper.getMappedExample((Example<?>) source, entity);
|
||||
return exampleMapper.getMappedExample((Example) source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof List) {
|
||||
@@ -922,8 +923,6 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class MetadataBackedField extends Field {
|
||||
|
||||
private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");
|
||||
private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+");
|
||||
private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
@@ -965,7 +964,7 @@ public class QueryMapper {
|
||||
this.entity = entity;
|
||||
this.mappingContext = context;
|
||||
|
||||
this.path = getPath(removePlaceholders(POSITIONAL_PARAMETER_PATTERN, name));
|
||||
this.path = getPath(name);
|
||||
this.property = path == null ? property : path.getLeafProperty();
|
||||
this.association = findAssociation();
|
||||
}
|
||||
@@ -1073,7 +1072,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}.
|
||||
* Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
|
||||
*
|
||||
* @param pathExpression
|
||||
* @return
|
||||
@@ -1081,8 +1080,8 @@ public class QueryMapper {
|
||||
@Nullable
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
String rawPath = removePlaceholders(POSITIONAL_OPERATOR,
|
||||
removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "") //
|
||||
.replaceAll(POSITIONAL_OPERATOR.pattern(), "");
|
||||
|
||||
PropertyPath path = forName(rawPath);
|
||||
if (path == null || isPathToJavaLangClassProperty(path)) {
|
||||
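Both sides of the hunk above strip positional placeholders before building a PropertyPath; the 3.0 side does so via the POSITIONAL_PARAMETER_PATTERN declared further up. A standalone illustration of the effect on update keys (the pattern string is copied from MetadataBackedField, the keys are illustrative):

// Standalone illustration of the placeholder stripping shown above.
Pattern positionalParameters = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");

String viaOperator = positionalParameters.matcher("grades.$[elem].score").replaceAll(""); // -> "grades.score"
String viaIndex = positionalParameters.matcher("grades.0.score").replaceAll("");          // -> "grades.score"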
@@ -1159,7 +1158,7 @@ public class QueryMapper {
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name, mappingContext);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1170,15 +1169,7 @@ public class QueryMapper {
|
||||
* @since 1.7
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(name, getAssociation());
|
||||
}
|
||||
|
||||
protected MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() {
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
private static String removePlaceholders(Pattern pattern, String raw) {
|
||||
return pattern.matcher(raw).replaceAll("");
|
||||
return new AssociationConverter(getAssociation());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1189,9 +1180,8 @@ public class QueryMapper {
|
||||
|
||||
private final KeyMapper keyMapper;
|
||||
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> ctx) {
|
||||
this.keyMapper = new KeyMapper(rawKey, ctx);
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
|
||||
this.keyMapper = new KeyMapper(rawKey);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1233,8 +1223,7 @@ public class QueryMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
public KeyMapper(String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
public KeyMapper(String key) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
@@ -1254,7 +1243,6 @@ public class QueryMapper {
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
|
||||
if (isPositional) {
|
||||
@@ -1267,7 +1255,7 @@ public class QueryMapper {
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
static boolean isPositionalParameter(String partial) {
|
||||
private static boolean isPositionalParameter(String partial) {
|
||||
|
||||
if ("$".equals(partial)) {
|
||||
return true;
|
||||
@@ -1295,7 +1283,6 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final String name;
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
|
||||
@@ -1304,11 +1291,10 @@ public class QueryMapper {
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public AssociationConverter(String name, Association<MongoPersistentProperty> association) {
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1326,12 +1312,6 @@ public class QueryMapper {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
if (associationFound) {
|
||||
if (name.endsWith("$") && property.isCollectionLike()) {
|
||||
return source.getFieldName() + ".$";
|
||||
}
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -272,7 +272,6 @@ public class UpdateMapper extends QueryMapper {
	 *
	 * @author Thomas Darimont
	 * @author Oliver Gierke
	 * @author Christoph Strobl
	 */
	private static class MetadataBackedUpdateField extends MetadataBackedField {

@@ -290,7 +289,7 @@ public class UpdateMapper extends QueryMapper {
		public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

			super(key, entity, mappingContext);
			super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), entity, mappingContext);
			this.key = key;
		}

@@ -309,7 +308,7 @@ public class UpdateMapper extends QueryMapper {
		 */
		@Override
		protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
			return new PositionParameterRetainingPropertyKeyConverter(key, getMappingContext());
			return new PositionParameterRetainingPropertyKeyConverter(key);
		}

		/*
@@ -318,7 +317,7 @@ public class UpdateMapper extends QueryMapper {
		 */
		@Override
		protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
			return new UpdateAssociationConverter(getMappingContext(), getAssociation(), key);
			return new UpdateAssociationConverter(getAssociation(), key);
		}

		/**
@@ -335,12 +334,10 @@ public class UpdateMapper extends QueryMapper {
		 *
		 * @param association must not be {@literal null}.
		 */
		public UpdateAssociationConverter(
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
				Association<MongoPersistentProperty> association, String key) {
		public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {

			super(key, association);
			this.mapper = new KeyMapper(key, mappingContext);
			super(association);
			this.mapper = new KeyMapper(key);
		}

		/*

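The MetadataBackedUpdateField constructor above strips positional operators from the update key before resolving metadata (super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), ...)). A quick standalone check of that exact regex against made-up keys:

class PositionalStripExample {

	public static void main(String[] args) {
		String positional = "\\.\\$(\\[.*\\])?"; // same pattern as in the constructor above

		System.out.println("grades.$.score".replaceAll(positional, ""));          // grades.score
		System.out.println("grades.$[element].score".replaceAll(positional, "")); // grades.score
		System.out.println("grades.score".replaceAll(positional, ""));            // grades.score (unchanged)
	}
}
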
@@ -19,14 +19,12 @@ import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.lang.Nullable;

/**
 * Internal API to trigger the resolution of properties.
 *
 * @author Oliver Gierke
 * @author Christoph Strobl
 * @author Mark Paluch
 */
interface ValueResolver {

@@ -40,6 +38,6 @@ interface ValueResolver {
	 * @param parent
	 * @return
	 */
	@Nullable
	Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, ObjectPath path);
	Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
			ObjectPath path);
}

@@ -0,0 +1,84 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core.index;

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @author Christoph Strobl
 * @since 2.2
 */
class JustOnceLogger {

	private static final Map<String, Set<String>> KNOWN_LOGS = new ConcurrentHashMap<>();
	private static final String AUTO_INDEX_CREATION_CONFIG_CHANGE;

	static {
		AUTO_INDEX_CREATION_CONFIG_CHANGE = "Automatic index creation will be disabled by default as of Spring Data MongoDB 3.x."
				+ System.lineSeparator()
				+ "\tPlease use 'MongoMappingContext#setAutoIndexCreation(boolean)' or override 'MongoConfigurationSupport#autoIndexCreation()' to be explicit."
				+ System.lineSeparator()
				+ "\tHowever, we recommend setting up indices manually in an application ready block. You may use index derivation there as well."
				+ System.lineSeparator() + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator() //
				+ "\t> @EventListener(ApplicationReadyEvent.class)" + System.lineSeparator() //
				+ "\t> public void initIndicesAfterStartup() {" + System.lineSeparator() //
				+ "\t>" + System.lineSeparator() //
				+ "\t> IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);" + System.lineSeparator()//
				+ "\t>" + System.lineSeparator() //
				+ "\t> IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);"
				+ System.lineSeparator() //
				+ "\t> resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);" + System.lineSeparator() //
				+ "\t> }" + System.lineSeparator() //
				+ "\t> -----------------------------------------------------------------------------------------"
				+ System.lineSeparator();
	}

	static void logWarnIndexCreationConfigurationChange(String loggerName) {
		warnOnce(loggerName, AUTO_INDEX_CREATION_CONFIG_CHANGE);
	}

	static void warnOnce(String loggerName, String message) {

		Logger logger = LoggerFactory.getLogger(loggerName);
		if (!logger.isWarnEnabled()) {
			return;
		}

		if (!KNOWN_LOGS.containsKey(loggerName)) {

			KNOWN_LOGS.put(loggerName, new ConcurrentSkipListSet<>(Collections.singleton(message)));
			logger.warn(message);
		} else {

			Set<String> messages = KNOWN_LOGS.get(loggerName);
			if (messages.contains(message)) {
				return;
			}

			messages.add(message);
			logger.warn(message);
		}
	}
}

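The warning assembled above points users at two options: configure auto index creation explicitly, or create indexes once the application is ready. Expanded into compilable form, the listener recommended by that message could look like the sketch below; registration as a Spring bean, the injected mongoTemplate and mongoMappingContext, and the DomainType placeholder are assumptions of this sketch, not part of the diff.

import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexResolver;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.stereotype.Component;

// Sketch of the application-ready index setup suggested by the warning message above.
@Component
class IndexInitializer {

	private final MongoTemplate mongoTemplate;              // assumed to be injected
	private final MongoMappingContext mongoMappingContext;  // assumed to be injected

	IndexInitializer(MongoTemplate mongoTemplate, MongoMappingContext mongoMappingContext) {
		this.mongoTemplate = mongoTemplate;
		this.mongoMappingContext = mongoMappingContext;
	}

	@EventListener(ApplicationReadyEvent.class)
	public void initIndicesAfterStartup() {

		IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);

		// Derive index definitions from the mapping metadata and ensure them explicitly.
		IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);
		resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);
	}

	static class DomainType {} // placeholder domain type used only by this sketch
}

Alternatively, as the message itself states, automatic index creation can be kept explicit via MongoMappingContext#setAutoIndexCreation(boolean) or by overriding MongoConfigurationSupport#autoIndexCreation().
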
@@ -26,7 +26,7 @@ import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mapping.context.MappingContextEvent;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
import org.springframework.data.mongodb.core.mapping.Document;
@@ -62,7 +62,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

	/**
	 * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
	 * {@link MongoDatabaseFactory}.
	 * {@link MongoDbFactory}.
	 *
	 * @param mappingContext must not be {@literal null}.
	 * @param indexOperationsProvider must not be {@literal null}.
@@ -74,7 +74,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

	/**
	 * Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
	 * {@link MongoDatabaseFactory}.
	 * {@link MongoDbFactory}.
	 *
	 * @param mappingContext must not be {@literal null}.
	 * @param mongoDbFactory must not be {@literal null}.
@@ -139,6 +139,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma

		for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {

			JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

			IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
					? (IndexDefinitionHolder) indexDefinition
					: new IndexDefinitionHolder("", indexDefinition, collection);

@@ -26,7 +26,7 @@ import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import com.mongodb.DBObject;

/**
 * {@link IndexFilter} implementation for usage with plain {@link Document} as well as {@link CriteriaDefinition} filter
 * {@link IndexFilter} implementation for usage with plain {@link DBObject} as well as {@link CriteriaDefinition} filter
 * expressions.
 *
 * @author Christoph Strobl
@@ -38,7 +38,7 @@ public class PartialIndexFilter implements IndexFilter {

	private final @NonNull Object filterExpression;

	/**
	 * Create new {@link PartialIndexFilter} for given {@link Document filter expression}.
	 * Create new {@link PartialIndexFilter} for given {@link DBObject filter expression}.
	 *
	 * @param where must not be {@literal null}.
	 * @return

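As a usage reminder for the filter type documented above, a partial index can be declared through the Index builder with a criteria-based filter. A hedged sketch; the field names, the age threshold, and the helper method are made up for illustration:

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

// Sketch: an index on "lastName" that only covers documents where "age" >= 18.
class PartialIndexExample {

	static IndexDefinition adultsByLastName() {
		return new Index().on("lastName", Sort.Direction.ASC)
				.partial(PartialIndexFilter.of(Criteria.where("age").gte(18)));
	}
}
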
@@ -142,6 +142,8 @@ public class ReactiveMongoPersistentEntityIndexCreator {

	Mono<String> createIndex(IndexDefinitionHolder indexDefinition) {

		JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());

		return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) //
				.onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation,
						e -> translateException(e, indexDefinition));

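For the reactive creator above, the manual index setup recommended by the warning has a reactive counterpart as well. A hedged sketch under the assumption that a ReactiveMongoTemplate and a MongoMappingContext are available and that DomainType is a placeholder:

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.index.IndexResolver;
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

import reactor.core.publisher.Flux;

// Sketch: resolve derived index definitions and ensure them through the reactive template.
class ReactiveIndexSetup {

	static Flux<String> ensureIndexes(ReactiveMongoTemplate reactiveMongoTemplate,
			MongoMappingContext mongoMappingContext) {

		IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);

		return Flux.fromIterable(resolver.resolveIndexFor(DomainType.class))
				.flatMap(index -> reactiveMongoTemplate.indexOps(DomainType.class).ensureIndex(index));
	}

	static class DomainType {} // placeholder domain type used only by this sketch
}
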
@@ -37,7 +37,6 @@ import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
@@ -64,8 +63,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
	private final @Nullable String collation;
	private final @Nullable Expression collationExpression;

	private final ShardKey shardKey;

	/**
	 * Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
	 * collection name to the entities simple type name.
@@ -95,27 +92,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
			this.collation = null;
			this.collationExpression = null;
		}

		this.shardKey = detectShardKey();
	}

	private ShardKey detectShardKey() {

		if (!isAnnotationPresent(Sharded.class)) {
			return ShardKey.none();
		}

		Sharded sharded = getRequiredAnnotation(Sharded.class);

		String[] keyProperties = sharded.shardKey();
		if (ObjectUtils.isEmpty(keyProperties)) {
			keyProperties = new String[] { "_id" };
		}

		ShardKey shardKey = ShardingStrategy.HASH.equals(sharded.shardingStrategy()) ? ShardKey.hash(keyProperties)
				: ShardKey.range(keyProperties);

		return sharded.immutableKey() ? ShardKey.immutable(shardKey) : shardKey;
	}

	/*
@@ -184,11 +160,6 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
				: null;
	}

	@Override
	public ShardKey getShardKey() {
		return shardKey;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()

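To illustrate what detectShardKey() on the 3.0 side of this diff derives (the method is not present on the 2.2 branch), a hypothetical annotated entity might look as follows; the type and property names are made up:

import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Sharded;
import org.springframework.data.mongodb.core.mapping.ShardingStrategy;

// RANGE strategy over two explicit key properties -> ShardKey.range("country", "userId").
// Omitting shardKey() falls back to { "_id" }; shardingStrategy = HASH would yield ShardKey.hash(..) instead.
@Document
@Sharded(shardKey = { "country", "userId" }, shardingStrategy = ShardingStrategy.RANGE)
class Customer {

	String country;
	String userId;
}
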
@@ -79,14 +79,8 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
		this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE
				: fieldNamingStrategy;

		if (isIdProperty() && hasExplicitFieldName()) {

			String annotatedName = getAnnotatedFieldName();
			if (!ID_FIELD_NAME.equals(annotatedName)) {
				LOG.warn(
						"Customizing field name for id property '{}.{}' is not allowed! Custom name ('{}') will not be considered!",
						owner.getName(), getName(), annotatedName);
			}
		if (isIdProperty() && getFieldName() != ID_FIELD_NAME) {
			LOG.warn("Customizing field name for id property not allowed! Custom name will not be considered!");
		}
	}

@@ -173,11 +167,6 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope

		FieldType fieldType = fieldAnnotation.targetType();
		if (fieldType == FieldType.IMPLICIT) {

			if (isEntity()) {
				return org.bson.Document.class;
			}

			return getType();
		}

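Relating to the id field-name hunk above: both variants enforce the same rule, namely that the id property is always persisted as "_id", so a custom field name on it is ignored and a warning is logged. A minimal made-up mapping that triggers that warning:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;

// The @Field name is not honoured for the id property; the document key stays "_id"
// and the warning from the constructor shown above is logged.
class Order {

	@Id
	@Field("orderNumber")
	String id;
}
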
Some files were not shown because too many files have changed in this diff.