Compare commits
124 Commits
| SHA1 |
|---|
| 0f987f44ef |
| 5df22233d8 |
| 9751c6bc8b |
| 7dc509953a |
| 2f4f1e524d |
| 26ddf9d59f |
| 96ff4e1d14 |
| 2af13c27a7 |
| f4d2fc6231 |
| 3ab679bcc7 |
| 318d552797 |
| 8cfbd39c7e |
| 3a14af5485 |
| 00034d899a |
| fba6d7d8be |
| 95c4707902 |
| 32f3e60c9f |
| b57e571033 |
| 6604c507dd |
| e1df28797a |
| c6630aa279 |
| bf921cdbd7 |
| a1b4e6df59 |
| 4b3312998a |
| c5501db577 |
| 936a0d35f7 |
| 5dd91d0b6d |
| 28510de6c8 |
| 4bbf4cd5cf |
| 90bd3f0f18 |
| 1e35116419 |
| dd336f0ecb |
| d020219ded |
| 0345eff69a |
| 55fee27fb6 |
| ffba352e15 |
| 1118df5550 |
| 29f05af733 |
| 7bac739146 |
| 6366d3cec1 |
| 44913abd80 |
| 7a7f7c942d |
| e9c9938016 |
| c9da0a75ff |
| 581961e79a |
| 0e0d726457 |
| 7b5fea960f |
| a04821ff90 |
| a6bd41bcf2 |
| 6387eb9762 |
| 5fb4b036bb |
| 4f0dc04a81 |
| ee59c6b774 |
| b0b905ddb7 |
| 7f7be5e47d |
| 5a49aa6519 |
| b68079c421 |
| fde49f2a5a |
| 4d73d76b9f |
| 8f2c806403 |
| d0eb76946e |
| df43d7fcdb |
| 7b34a602ed |
| c829387c82 |
| a4e12a96c9 |
| 7e3f7bd861 |
| 46ab6b4c94 |
| 8029acb3fb |
| 96ffb0b7f4 |
| 7849b5333b |
| fb45b4eb2a |
| a6a84421b4 |
| c4dcc7d0f5 |
| c25a1a9e53 |
| 81c68955fe |
| 22ca597fca |
| 6259cd2c3b |
| f153399c3b |
| 65508eb01b |
| a7c25c8524 |
| 087f8f78b5 |
| 534ce2e5f5 |
| 0f4f745d3c |
| ca802c7ca4 |
| e9d14f41a5 |
| 3b6880edfd |
| 0b77906a83 |
| 993018feb1 |
| dd15bbc5c1 |
| f6c62d6e2f |
| 632c499cf1 |
| 2c9956723e |
| d0c2c820d7 |
| f69ddb6c61 |
| 073668b1d1 |
| 291d84591c |
| 46806a5606 |
| 85519eb84d |
| f8ee9648da |
| c7f9274480 |
| 0bc0fff24e |
| c21b35973d |
| 44017485fc |
| c56a13ad00 |
| 0a2ea88f3c |
| 80da9e21ed |
| 708466b323 |
| 2905315452 |
| ecc9f3fd60 |
| c83929c2a3 |
| 9abf098bdb |
| f688cca400 |
| f09c622b2f |
| 6049541d0a |
| 2a3f862703 |
| 8b97e932a2 |
| 5f29bee6c9 |
| 5b8037a0ad |
| 1a19e140ee |
| 83307c424f |
| 8394523645 |
| 62be436569 |
| 44e6f5fa3f |
| f2fb05662f |
.travis.yml (28 changes)
@@ -1,8 +1,5 @@
language: java

jdk:
- oraclejdk8

before_install:
- mkdir -p downloads
- mkdir -p var/db var/log
@@ -14,20 +11,22 @@ before_install:
downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});"
sleep 15

jdk:
- openjdk13
- openjdk-ea

matrix:
allow_failures:
- jdk: openjdk-ea

env:
matrix:
- MONGO_VERSION=4.1.10
- MONGO_VERSION=4.0.4
- MONGO_VERSION=3.6.12
- MONGO_VERSION=3.4.20
- MONGO_VERSION=4.2.0
- MONGO_VERSION=4.0.14
- MONGO_VERSION=3.6.16
global:
- PROFILE=ci

addons:
apt:
packages:
- oracle-java8-installer

sudo: false

cache:
@@ -35,4 +34,7 @@ cache:
- $HOME/.m2
- downloads

script: "mvn clean dependency:list test -P${PROFILE} -Dsort"
install: true

script:
- "./mvnw clean dependency:list test -Pjava11 -Dsort -U"
Jenkinsfile (79 changes, vendored)
@@ -30,42 +30,42 @@ pipeline {
}
}
}
stage('Publish JDK 8 + MongoDB 4.1') {
stage('Publish JDK 8 + MongoDB 4.2') {
when {
changeset "ci/openjdk8-mongodb-4.1/**"
changeset "ci/openjdk8-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
stage('Publish JDK 8 + MongoDB 4.2') {
when {
changeset "ci/openjdk8-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }
stage('Publish JDK 14 + MongoDB 4.2') {
when {
changeset "ci/openjdk14-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2", "ci/openjdk8-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
steps {
script {
def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
}
}

stage("test: baseline") {
stage("test: baseline (jdk8)") {
when {
anyOf {
branch 'master'
@@ -74,7 +74,7 @@ pipeline {
}
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2:latest'
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
@@ -83,11 +83,11 @@ pipeline {
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

@@ -99,7 +99,7 @@ pipeline {
}
}
parallel {
stage("test: mongodb 4.0") {
stage("test: mongodb 4.0 (jdk8)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
@@ -111,17 +111,18 @@ pipeline {
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
stage("test: mongodb 4.1") {

stage("test: mongodb 4.2 (jdk8)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
@@ -130,11 +131,31 @@ pipeline {
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

stage("test: baseline (jdk14)") {
agent {
docker {
image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
README.adoc (141 changes)
@@ -50,11 +50,11 @@ public class MyService {

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {
class ApplicationConfig extends AbstractMongoClientConfiguration {

@Override
public MongoClient mongoClient() {
return new MongoClient();
return MongoClients.create();
}

@Override
@@ -94,6 +94,143 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
</repository>
----

== Upgrading from 2.x

The MongoDB Java Driver 4.0 no longer supports certain features that had already been deprecated in earlier minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We have summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma-delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNACKNOWLEDGED, ACKNOWLEDGED, JOURNALED, MAJORITY
|===

.Removed XML Namespace Elements and Attributes:
|===
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.

|===

=== Java Configuration

.Java API changes
|===
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===

.Removed Java API:
|===
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===
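
Below is a short, hedged migration sketch (not part of the original README) that applies the replacements from the tables above to the configuration class shown at the top of this document; the `ApplicationConfig` name is reused from that example, and the database name is purely illustrative.

.Java configuration before and after the upgrade (sketch)
====
[source,java]
----
// Previously (2.x, removed API):
//
//   class ApplicationConfig extends AbstractMongoConfiguration {
//       @Override
//       public MongoClient mongoClient() {
//           return new MongoClient();   // com.mongodb.MongoClient
//       }
//       ...
//   }

// 3.x replacement using the new configuration base class and client API:
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {

    @Override
    public MongoClient mongoClient() {
        return MongoClients.create();   // com.mongodb.client.MongoClient
    }

    @Override
    protected String getDatabaseName() {
        return "database";              // illustrative database name
    }
}
----
====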

=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

@Override
public void configureClientSettings(MongoClientSettings.Builder builder) {
builder.uuidRepresentation(UuidRepresentation.STANDARD);
}

// ...
}
----
====

== Getting Help

Having trouble with Spring Data? We’d love to help!
ci/openjdk11-mongodb-4.2/Dockerfile (15 changes)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
ci/openjdk14-mongodb-4.2/Dockerfile (15 changes)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk14:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
@@ -1,14 +1,15 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4
RUN RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
echo ${TZ} > /etc/timezone;

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update ; \
apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;

@@ -1,14 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -1,14 +1,15 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b
RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
pom.xml (22 changes)
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.M1</version>
<version>3.0.0.RC2</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.3.0.M1</version>
<version>2.3.0.RC2</version>
</parent>

<modules>
@@ -26,9 +26,9 @@
<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.3.0.M1</springdata.commons>
<mongo>3.12.0</mongo>
<mongo.reactivestreams>1.13.0</mongo.reactivestreams>
<springdata.commons>2.3.0.RC2</springdata.commons>
<mongo>4.0.2</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>

@@ -127,7 +127,7 @@
<!-- MongoDB -->
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<artifactId>mongodb-driver-core</artifactId>
<version>${mongo}</version>
</dependency>
</dependencies>
@@ -137,6 +137,16 @@
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>

<pluginRepositories>

@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.M1</version>
<version>3.0.0.RC2</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -14,7 +14,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.M1</version>
<version>3.0.0.RC2</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -11,7 +11,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.M1</version>
<version>3.0.0.RC2</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -65,6 +65,12 @@
<artifactId>querydsl-mongodb</artifactId>
<version>${querydsl}</version>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -82,28 +88,19 @@
</dependency>

<!-- reactive -->

<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-reactivestreams</artifactId>
<version>${mongo.reactivestreams}</version>
<artifactId>mongodb-driver-sync</artifactId>
<version>${mongo}</version>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-async</artifactId>
<version>${mongo}</version>
<artifactId>mongodb-driver-reactivestreams</artifactId>
<version>${mongo.reactivestreams}</version>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.mongodb</groupId>
<artifactId>bson</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
@@ -253,6 +250,13 @@
<scope>test</scope>
</dependency>

<dependency>
<groupId>org.junit-pioneer</groupId>
<artifactId>junit-pioneer</artifactId>
<version>0.5.3</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
@@ -340,12 +344,6 @@
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
<reactor.trace.cancel>true</reactor.trace.cancel>
</systemPropertyVariables>
<properties>
<property>
<name>listener</name>
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
</property>
</properties>
</configuration>
</plugin>
@@ -19,9 +19,9 @@ import java.util.List;

import org.springframework.dao.DataAccessException;

import com.mongodb.BulkWriteError;
import com.mongodb.BulkWriteException;
import com.mongodb.BulkWriteResult;
import com.mongodb.MongoBulkWriteException;
import com.mongodb.bulk.BulkWriteError;
import com.mongodb.bulk.BulkWriteResult;

/**
* Is thrown when errors occur during bulk operations.
@@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException {
private final BulkWriteResult result;

/**
* Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
* Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}.
*
* @param message must not be {@literal null}.
* @param source must not be {@literal null}.
*/
public BulkOperationException(String message, BulkWriteException source) {
public BulkOperationException(String message, MongoBulkWriteException source) {

super(message, source);
@@ -0,0 +1,112 @@
/*
* Copyright 2011-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoDatabase;

/**
* Interface for factories creating {@link MongoDatabase} instances.
*
* @author Mark Pollack
* @author Thomas Darimont
* @author Christoph Strobl
* @since 3.0
*/
public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider {

/**
* Obtain a {@link MongoDatabase} from the underlying factory.
*
* @return never {@literal null}.
* @throws DataAccessException
*/
MongoDatabase getMongoDatabase() throws DataAccessException;

/**
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
*
* @param dbName
* @return never {@literal null}.
* @throws DataAccessException
*/
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;

/**
* Exposes a shared {@link MongoExceptionTranslator}.
*
* @return will never be {@literal null}.
*/
PersistenceExceptionTranslator getExceptionTranslator();

/**
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
*
* @return never {@literal null}.
*/
@Override
default CodecRegistry getCodecRegistry() {
return getMongoDatabase().getCodecRegistry();
}

/**
* Obtain a {@link ClientSession} for given ClientSessionOptions.
*
* @param options must not be {@literal null}.
* @return never {@literal null}.
* @since 2.1
*/
ClientSession getSession(ClientSessionOptions options);

/**
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
* instances that are aware and bound to a new session with given {@link ClientSessionOptions options}.
*
* @param options must not be {@literal null}.
* @return never {@literal null}.
* @since 2.1
*/
default MongoDatabaseFactory withSession(ClientSessionOptions options) {
return withSession(getSession(options));
}

/**
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
* instances that are aware and bound to the given session.
*
* @param session must not be {@literal null}.
* @return never {@literal null}.
* @since 2.1
*/
MongoDatabaseFactory withSession(ClientSession session);

/**
* Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an
* {@link ClientSession#hasActiveTransaction() active transaction}.
*
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
* @since 2.1.3
*/
default boolean isTransactionActive() {
return false;
}
}
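
The interface above supersedes `MongoDbFactory` as the central entry point for obtaining `MongoDatabase` instances. As a rough, hedged sketch (not part of this changeset), client code could exercise the contract along the following lines; the `factory` parameter is assumed to be supplied elsewhere, for example as a Spring bean, and the "orders" database name is purely illustrative:

void useDatabaseFactory(MongoDatabaseFactory factory) {

    // Default database vs. an explicitly named database.
    MongoDatabase defaultDatabase = factory.getMongoDatabase();
    MongoDatabase ordersDatabase = factory.getMongoDatabase("orders"); // illustrative name

    // Bind the factory to a causally consistent session, mirroring what
    // MongoDatabaseUtils does internally when a transaction is synchronized.
    ClientSession session = factory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
    MongoDatabaseFactory sessionBound = factory.withSession(session);

    // MongoDatabase instances obtained from the session-bound factory take part in the
    // session; isTransactionActive() reports whether a transaction is currently running.
    MongoDatabase transactionalDatabase = sessionBound.getMongoDatabase();
    boolean active = sessionBound.isTransactionActive();
}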
@@ -27,7 +27,7 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
@@ -41,93 +41,94 @@ import com.mongodb.client.MongoDatabase;
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory} using
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory}.
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory} using
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) {
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory}.
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory,
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
MongoDatabaseFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
* Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active
|
||||
* transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory
|
||||
* resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active
|
||||
* transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
@@ -138,7 +139,8 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
private static ClientSession doGetSession(MongoDatabaseFactory dbFactory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
@@ -169,7 +171,7 @@ public class MongoDatabaseUtils {
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
@@ -184,7 +186,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
|
||||
@@ -15,14 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -31,92 +25,33 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
MongoDatabase getDb() throws DataAccessException;
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link DB} instance to access the database with the given name.
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getDb(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the legacy database entry point. Please consider {@link #getDb()} instead.
|
||||
*
|
||||
* @return
|
||||
* @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
|
||||
* with the legacy MongoDB driver.
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
DB getLegacyDb();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getDb().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDbFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDbFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,15 +36,15 @@ import com.mongodb.client.ClientSession;
|
||||
class MongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private MongoDbFactory dbFactory;
|
||||
private MongoDatabaseFactory dbFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
*/
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -75,9 +75,9 @@ class MongoResourceHolder extends ResourceHolderSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link MongoDbFactory}.
|
||||
* @return the associated {@link MongoDatabaseFactory}.
|
||||
*/
|
||||
public MongoDbFactory getDbFactory() {
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
|
||||
@@ -36,17 +36,18 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
|
||||
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
@@ -58,46 +59,46 @@ import com.mongodb.client.ClientSession;
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
|
||||
*/
|
||||
public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
implements ResourceTransactionManager, InitializingBean {
|
||||
|
||||
private @Nullable MongoDbFactory dbFactory;
|
||||
private @Nullable MongoDatabaseFactory dbFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
|
||||
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
|
||||
* {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
|
||||
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
|
||||
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDbFactory(MongoDbFactory)
|
||||
* @see #setDbFactory(MongoDatabaseFactory)
|
||||
* @see #setTransactionSynchronization(int)
|
||||
*/
|
||||
public MongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory) {
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory) {
|
||||
this(dbFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
|
||||
* given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}
|
||||
* applying the given {@link TransactionOptions options}, if present, when starting a new transaction.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
|
||||
@@ -295,11 +296,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
|
||||
* Set the {@link MongoDatabaseFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param dbFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDbFactory(MongoDbFactory dbFactory) {
|
||||
public void setDbFactory(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -315,12 +316,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link MongoDbFactory} that this instance manages transactions for.
|
||||
* Get the {@link MongoDatabaseFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public MongoDbFactory getDbFactory() {
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
@@ -329,7 +330,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory getResourceFactory() {
|
||||
public MongoDatabaseFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
@@ -344,7 +345,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {
|
||||
|
||||
MongoDbFactory dbFactory = getResourceFactory();
|
||||
MongoDatabaseFactory dbFactory = getResourceFactory();
|
||||
|
||||
MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
|
||||
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
|
||||
@@ -355,7 +356,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
|
||||
*/
|
||||
private MongoDbFactory getRequiredDbFactory() {
|
||||
private MongoDatabaseFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
@@ -41,16 +42,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
Mono<MongoDatabase> getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoDatabase} instance to access the database with the given name.
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
@@ -64,10 +65,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
CodecRegistry getCodecRegistry();
|
||||
|
||||
/**
|
||||
* Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
|
||||
|
||||
@@ -41,6 +41,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
@@ -142,14 +143,13 @@ public class ReactiveMongoDatabaseUtils {
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
.flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory))
|
||||
.switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
private static Mono<MongoDatabase> getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param databaseFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
* @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
@@ -99,7 +99,7 @@ class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
*
|
||||
* @param session
|
||||
* @return
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoDriverInformation;
|
||||
|
||||
/**
|
||||
* Class that exposes the SpringData MongoDB specific information like the current {@link Version} or
|
||||
* {@link MongoDriverInformation driver information}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class SpringDataMongoDB {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);
|
||||
|
||||
private static final Version FALLBACK_VERSION = new Version(3);
|
||||
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
|
||||
.builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();
|
||||
|
||||
/**
|
||||
* Obtain the SpringData MongoDB specific driver information.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static MongoDriverInformation driverInformation() {
|
||||
return DRIVER_INFORMATION;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the "Implementation-Version" manifest attribute from the jar file.
|
||||
* <p />
|
||||
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
|
||||
* version in all environments. In this case the current Major version is returned as a fallback.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static Version version() {
|
||||
|
||||
Package pkg = SpringDataMongoDB.class.getPackage();
|
||||
String versionString = (pkg != null ? pkg.getImplementationVersion() : null);
|
||||
|
||||
if (!StringUtils.hasText(versionString)) {
|
||||
|
||||
LOGGER.debug("Unable to find Spring Data MongoDB version.");
|
||||
return FALLBACK_VERSION;
|
||||
}
|
||||
|
||||
try {
|
||||
return Version.parse(versionString);
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
|
||||
}
|
||||
|
||||
return FALLBACK_VERSION;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -17,17 +17,22 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
|
||||
@@ -35,40 +40,44 @@ import com.mongodb.client.MongoClient;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoConfiguration
|
||||
*/
|
||||
@Configuration
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
*
|
||||
* @return
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
public MongoClient mongoClient() {
|
||||
return createMongoClient(mongoClientSettings());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
* @see #mongoDbFactory()
|
||||
* @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
|
||||
return new MongoTemplate(databaseFactory, converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
* Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
|
||||
* {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
* @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
|
||||
public MongoDatabaseFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -91,22 +100,32 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given
|
||||
* {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
|
||||
* <p />
|
||||
* <strong>INFO:</strong>In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
|
||||
* to {@link AbstractMongoClientConfiguration}.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Ryan Tenney
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoClientConfiguration
|
||||
* @deprecated since 2.2 in favor of {@link AbstractMongoClientConfiguration}.
|
||||
*/
|
||||
@Configuration
|
||||
@Deprecated
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(mongoDbFactory());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -18,13 +18,19 @@ package org.springframework.data.mongodb.config;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for reactive Spring Data MongoDB configuration using JavaConfig.
|
||||
@@ -34,25 +40,33 @@ import com.mongodb.reactivestreams.client.MongoClient;
|
||||
* @since 2.0
|
||||
* @see MongoConfigurationSupport
|
||||
*/
|
||||
@Configuration
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public abstract MongoClient reactiveMongoClient();
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return createReactiveMongoClient(mongoClientSettings());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link ReactiveMongoOperations}.
|
||||
*
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MappingMongoConverter mongoConverter) {
|
||||
return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -60,7 +74,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
* {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
|
||||
*
|
||||
* @see #reactiveMongoClient()
|
||||
* @see #reactiveMongoTemplate()
|
||||
* @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
@@ -70,21 +84,31 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
converter.setCodecRegistryProvider(reactiveMongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createReactiveMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link com.mongodb.ConnectionString}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String connectionString) {
|
||||
|
||||
if (!StringUtils.hasText(connectionString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ConnectionString(connectionString));
|
||||
}
|
||||
}
|
||||
@@ -96,6 +96,9 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;
|
||||
|
||||
String autoIndexCreation = element.getAttribute("auto-index-creation");
|
||||
boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);
|
||||
|
||||
parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));
|
||||
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
@@ -199,6 +202,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
|
||||
return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
|
||||
}
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {
|
||||
|
||||
String ctxRef = element.getAttribute("mapping-context-ref");
|
||||
|
||||
@@ -226,6 +234,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
|
||||
}
|
||||
|
||||
mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);
|
||||
|
||||
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);
|
||||
|
||||
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
|
||||
|
||||
@@ -50,10 +50,11 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
|
||||
ParsingUtils.setPropertyValue(builder, element, "port", "port");
|
||||
ParsingUtils.setPropertyValue(builder, element, "host", "host");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credential", "credential");
|
||||
ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet");
|
||||
ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString");
|
||||
|
||||
MongoParsingUtils.parseMongoClientOptions(element, builder);
|
||||
MongoParsingUtils.parseReplicaSet(element, builder);
|
||||
MongoParsingUtils.parseMongoClientSettings(element, builder);
|
||||
|
||||
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;
|
||||
|
||||
@@ -62,22 +63,34 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
|
||||
parserContext.registerBeanComponent(mongoComponent);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils
|
||||
.getServerAddressPropertyEditorBuilder());
|
||||
BeanComponentDefinition connectionStringPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(connectionStringPropertyEditor);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(serverAddressPropertyEditor);
|
||||
|
||||
BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils
|
||||
.getWriteConcernPropertyEditorBuilder());
|
||||
BeanComponentDefinition writeConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(writeConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils
|
||||
.getReadPreferencePropertyEditorBuilder());
|
||||
BeanComponentDefinition readConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readPreferenceEditor);
|
||||
|
||||
BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils
|
||||
.getMongoCredentialPropertyEditor());
|
||||
BeanComponentDefinition credentialsEditor = helper
|
||||
.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor());
|
||||
parserContext.registerBeanComponent(credentialsEditor);
|
||||
|
||||
BeanComponentDefinition uuidRepresentationEditor = helper
|
||||
.getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder());
|
||||
parserContext.registerBeanComponent(uuidRepresentationEditor);
|
||||
|
||||
parserContext.popAndRegisterContainingComponent();
|
||||
|
||||
return mongoComponent.getBeanDefinition();
|
||||
|
||||
@@ -20,6 +20,7 @@ import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
@@ -31,11 +32,15 @@ import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingSt
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB to be extended for JavaConfiguration usage.
|
||||
*
|
||||
@@ -75,11 +80,12 @@ public abstract class MongoConfigurationSupport {
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
|
||||
throws ClassNotFoundException {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
@@ -88,14 +94,30 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
* {@link #mongoMappingContext()}. Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* {@link CustomConversions} will be registered with the
|
||||
* {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link #mongoMappingContext()}.
|
||||
* Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* <p>
|
||||
* <strong>NOTE:</strong> Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB
|
||||
* native simple types and register custom {@link Converter converters}.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public CustomConversions customConversions() {
|
||||
return new MongoCustomConversions(Collections.emptyList());
|
||||
public MongoCustomConversions customConversions() {
|
||||
return MongoCustomConversions.create(this::configureConverters);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration hook for {@link MongoCustomConversions} creation.
|
||||
*
|
||||
* @param converterConfigurationAdapter never {@literal null}.
|
||||
* @since 2.3
|
||||
* @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs()
|
||||
* @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs()
|
||||
*/
|
||||
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -177,11 +199,36 @@ public abstract class MongoConfigurationSupport {
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal true} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
|
||||
* @return {@literal false} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. <br />
|
||||
* Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings mongoClientSettings() {
|
||||
|
||||
MongoClientSettings.Builder builder = MongoClientSettings.builder();
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
configureClientSettings(builder);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure {@link MongoClientSettings} via its {@link Builder} API.
|
||||
*
|
||||
* @param builder never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected void configureClientSettings(MongoClientSettings.Builder builder) {
|
||||
// customization hook
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,14 +32,12 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
import com.mongodb.ConnectionString;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s.
|
||||
@@ -84,10 +82,11 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(SimpleMongoClientDatabaseFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
BeanDefinition mongoUri = getConnectionString(element, parserContext);
|
||||
|
||||
if (mongoUri != null) {
|
||||
|
||||
@@ -97,7 +96,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String mongoRef = element.getAttribute("mongo-client-ref");
|
||||
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
// Defaulting
|
||||
@@ -119,8 +119,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the
|
||||
* {@link Mongo} instance was registered under.
|
||||
* Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the
|
||||
* name under which the {@link com.mongodb.client.MongoClient} instance was registered under.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext must not be {@literal null}.
|
||||
@@ -136,8 +136,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
@@ -146,11 +145,19 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
@Nullable
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
private BeanDefinition getConnectionString(Element element, ParserContext parserContext) {
|
||||
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
String type = null;
|
||||
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
if (element.hasAttribute("client-uri")) {
|
||||
type = "client-uri";
|
||||
} else if (element.hasAttribute("connection-string")) {
|
||||
type = "connection-string";
|
||||
} else if (element.hasAttribute("uri")) {
|
||||
type = "uri";
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(type)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -164,16 +171,12 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
Class<?> type = MongoClientURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class);
|
||||
builder.addConstructorArgValue(element.getAttribute(type));
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -43,60 +43,78 @@ abstract class MongoParsingUtils {
|
||||
private MongoParsingUtils() {}
|
||||
|
||||
/**
|
||||
* Parses the mongo replica-set element.
|
||||
*
|
||||
* @param parserContext the parser context
|
||||
* @param element the mongo element
|
||||
* @param mongoBuilder the bean definition builder to populate
|
||||
* @return
|
||||
*/
|
||||
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
|
||||
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param mongoClientBuilder must not be {@literal null}.
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @since 3.0
|
||||
*/
|
||||
public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
|
||||
Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options");
|
||||
|
||||
if (optionsElement == null) {
|
||||
Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings");
|
||||
if (settingsElement == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoClientOptionsFactoryBean.class);
|
||||
.genericBeanDefinition(MongoClientSettingsFactoryBean.class);
|
||||
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
|
||||
"threadsAllowedToBlockForConnectionMultiplier");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation");
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
// SocketSettings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize");
|
||||
|
||||
// Server Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency",
|
||||
"serverHeartbeatFrequencyMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency",
|
||||
"serverMinHeartbeatFrequencyMS");
|
||||
|
||||
// Cluster Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout",
|
||||
"clusterServerSelectionTimeoutMS");
|
||||
|
||||
// Connection Pool Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time",
|
||||
"poolMaxConnectionLifeTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time",
|
||||
"poolMaxConnectionIdleTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay",
|
||||
"poolMaintenanceInitialDelayMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency",
|
||||
"poolMaintenanceFrequencyMS");
|
||||
|
||||
// SSL Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed",
|
||||
"sslInvalidHostNameAllowed");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider");
|
||||
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -118,6 +136,24 @@ abstract class MongoParsingUtils {
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ReadConcernPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* One should only register one bean definition but want to have the convenience of using
|
||||
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
|
||||
@@ -125,7 +161,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() {
|
||||
|
||||
Map<String, String> customEditors = new ManagedMap<String, String>();
|
||||
Map<String, String> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ServerAddress[]",
|
||||
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
|
||||
|
||||
@@ -143,7 +179,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
@@ -169,4 +205,41 @@ abstract class MongoParsingUtils {
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadConcernLevel;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link ReadConcern}. If it is a well know {@link String} as identified by the
|
||||
* {@link ReadConcernLevel#fromString(String)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ReadConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readConcernString) {
|
||||
|
||||
if (!StringUtils.hasText(readConcernString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString)));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link UuidRepresentation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String value) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(UuidRepresentation.valueOf(value));
|
||||
}
|
||||
}
|
||||
@@ -34,7 +34,7 @@ import com.mongodb.WriteConcern;
|
||||
public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/**
|
||||
* Parse a string to a List<ServerAddress>
|
||||
* Parse a string to a {@link WriteConcern}.
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String writeConcernString) {
|
||||
@@ -51,6 +51,5 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
// pass on the string to the constructor
|
||||
setValue(new WriteConcern(writeConcernString));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +32,7 @@ import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
@@ -62,6 +63,7 @@ import com.mongodb.client.model.*;
 * @author Minsu Kim
 * @author Jens Schauder
 * @author Michail Nikolaev
 * @author Roman Puchkovskiy
 * @since 1.9
 */
class DefaultBulkOperations implements BulkOperations {
@@ -300,6 +302,7 @@ class DefaultBulkOperations implements BulkOperations {
        Assert.state(result != null, "Result must not be null.");

        models.forEach(this::maybeEmitAfterSaveEvent);
        models.forEach(this::maybeInvokeAfterSaveCallback);

        return result;
    } finally {
@@ -421,38 +424,52 @@ class DefaultBulkOperations implements BulkOperations {
        models.add(new SourceAwareWriteModelHolder(source, model));
    }

    private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
    private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

        if (it.getModel() instanceof InsertOneModel) {
        if (holder.getModel() instanceof InsertOneModel) {

            Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
            maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
        } else if (it.getModel() instanceof ReplaceOneModel) {
            Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
            maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
        } else if (holder.getModel() instanceof ReplaceOneModel) {

            Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
            maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
            Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
            maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
        }
    }

    private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
    private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

        if (it.getModel() instanceof InsertOneModel) {
        if (holder.getModel() instanceof InsertOneModel) {

            Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
            maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
        } else if (it.getModel() instanceof ReplaceOneModel) {
            Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
            maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
        } else if (holder.getModel() instanceof ReplaceOneModel) {

            Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
            maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
            Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
            maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
        }
    }

    private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {

        if (holder.getModel() instanceof InsertOneModel) {

            Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
            maybeInvokeAfterSaveCallback(holder.getSource(), target);
        } else if (holder.getModel() instanceof ReplaceOneModel) {

            Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
            maybeInvokeAfterSaveCallback(holder.getSource(), target);
        }
    }

    private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {

        if (null != bulkOperationContext.getEventPublisher()) {
            bulkOperationContext.getEventPublisher().publishEvent(event);
        if (bulkOperationContext.getEventPublisher() == null) {
            return event;
        }

        bulkOperationContext.getEventPublisher().publishEvent(event);
        return event;
    }

@@ -475,6 +492,16 @@ class DefaultBulkOperations implements BulkOperations {
                collectionName);
    }

    private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {

        if (bulkOperationContext.getEntityCallbacks() == null) {
            return value;
        }

        return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
                collectionName);
    }

    private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {

        BulkWriteOptions options = new BulkWriteOptions();

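As a rough usage sketch, not part of the diff: mongoTemplate, Person and the statically imported query(..)/where(..) helpers are assumptions here; the events and callbacks wired in above fire once execute() completes for insert and replace models.

BulkOperations bulkOps = mongoTemplate.bulkOps(BulkOperations.BulkMode.UNORDERED, Person.class);
bulkOps.insert(new Person("luke"));
bulkOps.replaceOne(query(where("firstname").is("han")), new Person("han"));
BulkWriteResult result = bulkOps.execute(); // AfterSaveEvent / AfterSaveCallback are emitted afterwards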
@@ -21,7 +21,7 @@ import java.util.List;

import org.bson.Document;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexDefinition;
import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -64,7 +64,7 @@ public class DefaultIndexOperations implements IndexOperations {
     * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
     */
    @Deprecated
    public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
    public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
        this(mongoDbFactory, collectionName, queryMapper, null);
    }

@@ -80,7 +80,7 @@ public class DefaultIndexOperations implements IndexOperations {
     * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
     */
    @Deprecated
    public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
    public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
            @Nullable Class<?> type) {

        Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");

@@ -15,13 +15,13 @@
 */
package org.springframework.data.mongodb.core;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.index.IndexOperations;
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;

/**
 * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}.
 * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
@@ -29,14 +29,14 @@ import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
 */
class DefaultIndexOperationsProvider implements IndexOperationsProvider {

    private final MongoDbFactory mongoDbFactory;
    private final MongoDatabaseFactory mongoDbFactory;
    private final QueryMapper mapper;

    /**
     * @param mongoDbFactory must not be {@literal null}.
     * @param mapper must not be {@literal null}.
     */
    DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) {
    DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) {

        this.mongoDbFactory = mongoDbFactory;
        this.mapper = mapper;

@@ -21,6 +21,7 @@ import java.util.stream.Stream;

import org.springframework.dao.DataAccessException;
import org.springframework.data.geo.GeoResults;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.lang.Nullable;
@@ -43,7 +44,7 @@ import com.mongodb.client.MongoCollection;
 * query(Human.class)
 *     .inCollection("star-wars")
 *     .as(Jedi.class)
 *     .matching(query(where("firstname").is("luke")))
 *     .matching(where("firstname").is("luke"))
 *     .all();
 * </code>
 * </pre>
@@ -170,6 +171,18 @@ public interface ExecutableFindOperation {
     */
    TerminatingFind<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingFind}.
     * @throws IllegalArgumentException if criteria is {@literal null}.
     * @since 3.0
     */
    default TerminatingFind<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }

    /**
     * Set the filter query for the geoNear execution.
     *
@@ -291,9 +304,21 @@ public interface ExecutableFindOperation {
     *
     * @param query must not be {@literal null}.
     * @return new instance of {@link TerminatingDistinct}.
     * @throws IllegalArgumentException if resultType is {@literal null}.
     * @throws IllegalArgumentException if query is {@literal null}.
     */
    TerminatingDistinct<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingDistinct}.
     * @throws IllegalArgumentException if criteria is {@literal null}.
     * @since 3.0
     */
    default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}

/**

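Taken together with the interface javadoc above, the new default method lets a criteria be passed without wrapping it in a Query first; a short sketch in which template, Human, Jedi and the statically imported where(..) are assumed:

List<Jedi> jedi = template.query(Human.class)
        .inCollection("star-wars")
        .as(Jedi.class)
        .matching(where("firstname").is("luke"))   // no Query.query(..) wrapper needed anymore
        .all();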
@@ -19,6 +19,7 @@ import java.util.List;

import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

/**
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
 * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
 * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the
 * collection name for the execution.
 *
 *
 * <pre>
 * <code>
 * mapReduce(Human.class)
@@ -44,6 +45,7 @@ import org.springframework.data.mongodb.core.query.Query;
 * </pre>
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 */
public interface ExecutableMapReduceOperation {
@@ -146,6 +148,18 @@ public interface ExecutableMapReduceOperation {
     * @throws IllegalArgumentException if query is {@literal null}.
     */
    TerminatingMapReduce<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingMapReduce}.
     * @throws IllegalArgumentException if query is {@literal null}.
     * @since 3.0
     */
    default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}

/**

@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;

import java.util.List;

import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;
@@ -119,6 +120,18 @@ public interface ExecutableRemoveOperation {
     * @throws IllegalArgumentException if query is {@literal null}.
     */
    TerminatingRemove<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingRemove}.
     * @throws IllegalArgumentException if query is {@literal null}.
     * @since 3.0
     */
    default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}

/**

@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
import java.util.Optional;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
@@ -210,6 +211,18 @@ public interface ExecutableUpdateOperation {
     * @throws IllegalArgumentException if query is {@literal null}.
     */
    UpdateWithUpdate<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link UpdateWithUpdate}.
     * @throws IllegalArgumentException if query is {@literal null}.
     * @since 3.0
     */
    default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}

/**

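The same convenience applies to the update and remove entry points; a brief sketch in which template, Person and the statically imported where(..) are assumptions:

template.update(Person.class)
        .matching(where("firstname").is("luke"))
        .apply(new Update().set("lastname", "skywalker"))
        .first();

template.remove(Person.class)
        .matching(where("firstname").is("jabba"))
        .all();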
@@ -80,7 +80,11 @@ public class MappedDocument {
    }

    public Bson getIdFilter() {
        return Filters.eq(ID_FIELD, document.get(ID_FIELD));
        return new Document(ID_FIELD, document.get(ID_FIELD));
    }

    public Object get(String key) {
        return document.get(key);
    }

    public UpdateDefinition updateWithoutId() {

@@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.util.Assert;

import com.mongodb.MongoClient;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoDatabase;

/**
@@ -34,24 +34,13 @@ import com.mongodb.client.MongoDatabase;
@ManagedResource(description = "Mongo Admin Operations")
public class MongoAdmin implements MongoAdminOperations {

    private final Object mongoClient;

    /**
     * @param mongoClient
     * @deprecated since 2.2 in favor of {@link MongoAdmin#MongoAdmin(com.mongodb.client.MongoClient)}.
     */
    @Deprecated
    public MongoAdmin(MongoClient mongoClient) {

        Assert.notNull(mongoClient, "MongoClient must not be null!");
        this.mongoClient = mongoClient;
    }
    private final MongoClient mongoClient;

    /**
     * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access.
     * @since 2.2
     */
    public MongoAdmin(com.mongodb.client.MongoClient client) {
    public MongoAdmin(MongoClient client) {

        Assert.notNull(client, "Client must not be null!");
        this.mongoClient = client;
@@ -88,11 +77,6 @@ public class MongoAdmin implements MongoAdminOperations {
    }

    MongoDatabase getDB(String databaseName) {

        if (mongoClient instanceof MongoClient) {
            return ((MongoClient) mongoClient).getDatabase(databaseName);
        }

        return ((com.mongodb.client.MongoClient) mongoClient).getDatabase(databaseName);
        return mongoClient.getDatabase(databaseName);
    }
}

@@ -16,70 +16,72 @@
package org.springframework.data.mongodb.core;

import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.bson.UuidRepresentation;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.SpringDataMongoDB;
import org.springframework.lang.Nullable;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientOptions;
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.connection.ClusterSettings;
import com.mongodb.connection.ConnectionPoolSettings;
import com.mongodb.connection.ServerSettings;
import com.mongodb.connection.SocketSettings;
import com.mongodb.connection.SslSettings;
import com.mongodb.event.ClusterListener;

/**
 * Convenient factory for configuring MongoDB.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 1.7
 * @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean} at
 * this time. However moving forward the {@link org.springframework.beans.factory.FactoryBean} will be
 * suitable to provide instances of {@link com.mongodb.client.MongoClient}.
 */
@Deprecated
public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> implements PersistenceExceptionTranslator {

    private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();

    private @Nullable MongoClientOptions mongoClientOptions;
    private @Nullable MongoClientSettings mongoClientSettings;
    private @Nullable String host;
    private @Nullable Integer port;
    private List<ServerAddress> replicaSetSeeds = Collections.emptyList();
    private List<MongoCredential> credentials = Collections.emptyList();
    private @Nullable List<MongoCredential> credential = null;
    private @Nullable ConnectionString connectionString;
    private @Nullable String replicaSet = null;

    private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;

    /**
     * Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}.
     * Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}.
     *
     * @param mongoClientOptions
     */
    public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) {
        this.mongoClientOptions = mongoClientOptions;
    public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) {
        this.mongoClientSettings = mongoClientOptions;
    }

    /**
     * Set the list of credentials to be used when creating {@link MongoClient}.
     *
     * @param credentials can be {@literal null}.
     * @param credential can be {@literal null}.
     */
    public void setCredentials(@Nullable MongoCredential[] credentials) {
        this.credentials = filterNonNullElementsAsList(credentials);
    }

    /**
     * Set the list of {@link ServerAddress} to build up a replica set for.
     *
     * @param replicaSetSeeds can be {@literal null}.
     */
    public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) {
        this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
    public void setCredential(@Nullable MongoCredential[] credential) {
        this.credential = Arrays.asList(credential);
    }

    /**
@@ -100,6 +102,14 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
        this.port = port;
    }

    public void setConnectionString(@Nullable ConnectionString connectionString) {
        this.connectionString = connectionString;
    }

    public void setReplicaSet(@Nullable String replicaSet) {
        this.replicaSet = replicaSet;
    }

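A rough configuration sketch for the reworked factory bean, illustrative only: the connection string value and replica set name are assumptions, and the lifecycle calls come from AbstractFactoryBean. Note that, per computeClientSetting() further down, a ConnectionString must not be combined with an explicit host/port.

MongoClientFactoryBean factoryBean = new MongoClientFactoryBean();
factoryBean.setConnectionString(new ConnectionString("mongodb://localhost:27017"));
factoryBean.setReplicaSet("rs0");
factoryBean.afterPropertiesSet();             // eagerly creates the client via createInstance()
MongoClient client = factoryBean.getObject();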
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to use.
|
||||
*
|
||||
@@ -132,12 +142,198 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
*/
|
||||
@Override
|
||||
protected MongoClient createInstance() throws Exception {
|
||||
return createMongoClient(computeClientSetting());
|
||||
}
|
||||
|
||||
if (mongoClientOptions == null) {
|
||||
mongoClientOptions = MongoClientOptions.builder().build();
|
||||
/**
|
||||
* Create {@link MongoClientSettings} based on configuration and priority (lower is better).
|
||||
* <ol>
|
||||
* <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
|
||||
* <li>{@link MongoClientFactoryBean#connectionString}</li>
|
||||
* <li>default {@link MongoClientSettings}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings computeClientSetting() {
|
||||
|
||||
if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
|
||||
throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
|
||||
}
|
||||
|
||||
return createMongoClient();
|
||||
ConnectionString connectionString = this.connectionString != null ? this.connectionString
|
||||
: new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()),
|
||||
getOrDefault(port, "" + ServerAddress.defaultPort())));
|
||||
|
||||
Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString);
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
|
||||
if (mongoClientSettings != null) {
|
||||
|
||||
MongoClientSettings defaultSettings = MongoClientSettings.builder().build();
|
||||
|
||||
SslSettings sslSettings = mongoClientSettings.getSslSettings();
|
||||
ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings();
|
||||
ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings();
|
||||
SocketSettings socketSettings = mongoClientSettings.getSocketSettings();
|
||||
ServerSettings serverSettings = mongoClientSettings.getServerSettings();
|
||||
|
||||
builder = builder //
|
||||
.applicationName(computeSettingsValue(defaultSettings.getApplicationName(),
|
||||
mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) //
|
||||
.applyToSslSettings(settings -> {
|
||||
|
||||
applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled()));
|
||||
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed())));
|
||||
settings.context(sslSettings.getContext());
|
||||
}).applyToClusterSettings(settings -> {
|
||||
|
||||
applySettings(settings::hosts,
|
||||
computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList())));
|
||||
|
||||
applySettings(settings::requiredReplicaSetName,
|
||||
computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(),
|
||||
clusterSettings, connectionString.getRequiredReplicaSetName()));
|
||||
|
||||
applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold()));
|
||||
|
||||
applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getServerSelectionTimeout()));
|
||||
|
||||
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
List<ClusterListener> clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null);
|
||||
if (clusterListeners != null) {
|
||||
clusterListeners.forEach(settings::addClusterListener);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionIdleTime()));
|
||||
|
||||
applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionLifeTime()));
|
||||
|
||||
applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxWaitTime()));
|
||||
|
||||
applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue(
|
||||
(ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(settings::minSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMinConnectionPoolSize()));
|
||||
applySettings(settings::maxSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMaxConnectionPoolSize()));
|
||||
}) //
|
||||
.applyToSocketSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout()));
|
||||
|
||||
applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout()));
|
||||
applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
}) //
|
||||
.applyToServerSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, null));
|
||||
|
||||
applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency()));
|
||||
settings.applySettings(serverSettings);
|
||||
}) //
|
||||
.autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) //
|
||||
.codecRegistry(mongoClientSettings.getCodecRegistry()); //
|
||||
|
||||
applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(),
|
||||
mongoClientSettings.getReadConcern(), connectionString.getReadConcern()));
|
||||
applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(),
|
||||
mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern()));
|
||||
applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(),
|
||||
mongoClientSettings.getReadPreference(), connectionString.getReadPreference()));
|
||||
applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(),
|
||||
mongoClientSettings.getRetryReads(), connectionString.getRetryReads()));
|
||||
applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(),
|
||||
mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue()));
|
||||
applySettings(builder::uuidRepresentation,
|
||||
computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY));
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(credential)) {
|
||||
builder = builder.credential(credential.iterator().next());
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(replicaSet)) {
|
||||
builder.applyToClusterSettings((settings) -> {
|
||||
settings.requiredReplicaSetName(replicaSet);
|
||||
});
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private <T> void applySettings(Consumer<T> settingsBuilder, @Nullable T value) {
|
||||
|
||||
if (ObjectUtils.isEmpty(value)) {
|
||||
return;
|
||||
}
|
||||
settingsBuilder.accept(value);
|
||||
}
|
||||
|
||||
private <S, T> T computeSettingsValue(Function<S, T> function, S defaultValueHolder, S settingsValueHolder,
|
||||
@Nullable T connectionStringValue) {
|
||||
return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder),
|
||||
connectionStringValue);
|
||||
}
|
||||
|
||||
private <T> T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) {
|
||||
|
||||
boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings);
|
||||
boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString);
|
||||
|
||||
if (!fromSettingsIsDefault) {
|
||||
return fromSettings;
|
||||
}
|
||||
return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -152,43 +348,11 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
}
|
||||
}
|
||||
|
||||
private MongoClient createMongoClient() throws UnknownHostException {
|
||||
|
||||
if (!CollectionUtils.isEmpty(replicaSetSeeds)) {
|
||||
return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions);
|
||||
private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
|
||||
private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException {
|
||||
|
||||
ServerAddress defaultAddress = new ServerAddress();
|
||||
|
||||
return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(),
|
||||
port != null ? port.intValue() : defaultAddress.getPort());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given array as {@link List} with all {@literal null} elements removed.
|
||||
*
|
||||
* @param elements the elements to filter <T>, can be {@literal null}.
|
||||
* @return a new unmodifiable {@link List#} from the given elements without {@literal null}s.
|
||||
*/
|
||||
private static <T> List<T> filterNonNullElementsAsList(@Nullable T[] elements) {
|
||||
|
||||
if (elements == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<T> candidateElements = new ArrayList<T>();
|
||||
|
||||
for (T element : elements) {
|
||||
if (element != null) {
|
||||
candidateElements.add(element);
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.unmodifiableList(candidateElements);
|
||||
private String getOrDefault(Object value, String defaultValue) {
|
||||
return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,338 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientOptions} instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2 - There is no replacement for this {@link org.springframework.beans.factory.FactoryBean}.
|
||||
* However moving forward there will be a dedicated factory bean for {@link com.mongodb.MongoClientSettings}
|
||||
* replacing {@link MongoClientOptions}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClientOptions> {
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
// TODO: Mongo Driver 4 - use application name instead of description if not available
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
.getThreadsAllowedToBlockForConnectionMultiplier();
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime();
|
||||
private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime();
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
|
||||
// TODO: Mongo Driver 4 - check if available
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory();
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
|
||||
// TODO: Mongo Driver 4 - remove this option
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout();
|
||||
private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout();
|
||||
private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName();
|
||||
private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout();
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
// TODO: Mongo Driver 4 - deprecate that one and add application name
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of connections per host.
|
||||
*
|
||||
* @param minConnectionsPerHost
|
||||
*/
|
||||
public void setMinConnectionsPerHost(int minConnectionsPerHost) {
|
||||
this.minConnectionsPerHost = minConnectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of connections allowed per host. Will block if run out. Default is 10. System property
|
||||
* {@code MONGO.POOLSIZE} can override
|
||||
*
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the multiplier for connectionsPerHost for # of threads that can block. Default is 5. If connectionsPerHost is
|
||||
* 10, and threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an
|
||||
* exception will be thrown.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the max wait time of a blocking thread for a connection. Default is 12000 ms (2 minutes)
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum idle time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionIdleTime
|
||||
*/
|
||||
public void setMaxConnectionIdleTime(int maxConnectionIdleTime) {
|
||||
this.maxConnectionIdleTime = maxConnectionIdleTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum life time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionLifeTime
|
||||
*/
|
||||
public void setMaxConnectionLifeTime(int maxConnectionLifeTime) {
|
||||
this.maxConnectionLifeTime = maxConnectionLifeTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout in milliseconds. 0 is default and infinite.
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout. 0 is default and infinite.
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
public void setSocketKeepAlive(boolean socketKeepAlive) {
|
||||
this.socketKeepAlive = socketKeepAlive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(@Nullable ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB
|
||||
* object.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(@Nullable WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketFactory
|
||||
*/
|
||||
public void setSocketFactory(@Nullable SocketFactory socketFactory) {
|
||||
this.socketFactory = socketFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the frequency that the driver will attempt to determine the current state of each server in the cluster.
|
||||
*
|
||||
* @param heartbeatFrequency
|
||||
*/
|
||||
public void setHeartbeatFrequency(int heartbeatFrequency) {
|
||||
this.heartbeatFrequency = heartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* In the event that the driver has to frequently re-check a server's availability, it will wait at least this long
|
||||
* since the previous check to avoid wasted effort.
|
||||
*
|
||||
* @param minHeartbeatFrequency
|
||||
*/
|
||||
public void setMinHeartbeatFrequency(int minHeartbeatFrequency) {
|
||||
this.minHeartbeatFrequency = minHeartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatConnectTimeout
|
||||
*/
|
||||
public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) {
|
||||
this.heartbeatConnectTimeout = heartbeatConnectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatSocketTimeout
|
||||
*/
|
||||
public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) {
|
||||
this.heartbeatSocketTimeout = heartbeatSocketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the name of the replica set.
|
||||
*
|
||||
* @param requiredReplicaSetName
|
||||
*/
|
||||
public void setRequiredReplicaSetName(String requiredReplicaSetName) {
|
||||
this.requiredReplicaSetName = requiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls whether the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here,
|
||||
* {@link SSLSocketFactory#getDefault()} will be used.
|
||||
*
|
||||
* @param sslSocketFactory
|
||||
*/
|
||||
public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
this.ssl = sslSocketFactory != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of
|
||||
* 30 sec will be used. A value of 0 means that it will timeout immediately if no server is available. A negative
|
||||
* value means to wait indefinitely.
|
||||
*
|
||||
* @param serverSelectionTimeout in msec.
|
||||
*/
|
||||
public void setServerSelectionTimeout(int serverSelectionTimeout) {
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link AutoEncryptionSettings} to be used.
|
||||
*
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
*/
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@Override
|
||||
protected MongoClientOptions createInstance() throws Exception {
|
||||
|
||||
SocketFactory socketFactoryToUse = ssl
|
||||
? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault())
|
||||
: this.socketFactory;
|
||||
|
||||
return MongoClientOptions.builder() //
|
||||
.alwaysUseMBeans(this.alwaysUseMBeans) //
|
||||
.connectionsPerHost(this.connectionsPerHost) //
|
||||
.connectTimeout(connectTimeout) //
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
.maxConnectionIdleTime(maxConnectionIdleTime) //
|
||||
.maxConnectionLifeTime(maxConnectionLifeTime) //
|
||||
.maxWaitTime(maxWaitTime) //
|
||||
.minConnectionsPerHost(minConnectionsPerHost) //
|
||||
.minHeartbeatFrequency(minHeartbeatFrequency) //
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 -
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientOptions.class;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,486 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private @Nullable Boolean retryReads = null;
|
||||
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private @Nullable Boolean retryWrites = null;
|
||||
|
||||
private @Nullable String applicationName = null;
|
||||
|
||||
private @Nullable UuidRepresentation uUidRepresentation = null;
|
||||
|
||||
// --> Socket Settings
|
||||
|
||||
private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings()
|
||||
.getConnectTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize();
|
||||
private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize();
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost();
|
||||
private List<ServerAddress> clusterHosts = Collections.emptyList();
|
||||
private @Nullable ClusterConnectionMode clusterConnectionMode = null;
|
||||
private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType();
|
||||
private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getRequiredReplicaSetName();
|
||||
private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getLocalThreshold(TimeUnit.MILLISECONDS);
|
||||
private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getServerSelectionTimeout(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize();
|
||||
private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize();
|
||||
private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxWaitTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled();
|
||||
private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed();
|
||||
private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled()
|
||||
? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName()
|
||||
: "";
|
||||
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) {
|
||||
this.socketConnectTimeoutMS = socketConnectTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReadTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketReadTimeoutMS(int socketReadTimeoutMS) {
|
||||
this.socketReadTimeoutMS = socketReadTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReceiveBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int)
|
||||
*/
|
||||
public void setSocketReceiveBufferSize(int socketReceiveBufferSize) {
|
||||
this.socketReceiveBufferSize = socketReceiveBufferSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketSendBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int)
|
||||
*/
|
||||
public void setSocketSendBufferSize(int socketSendBufferSize) {
|
||||
this.socketSendBufferSize = socketSendBufferSize;
|
||||
}
|
||||
|
||||
// --> Server Settings
|
||||
|
||||
private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
/**
|
||||
* @param serverHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) {
|
||||
this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverMinHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) {
|
||||
this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
/**
|
||||
* @param clusterSrvHost
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String)
|
||||
*/
|
||||
public void setClusterSrvHost(String clusterSrvHost) {
|
||||
this.clusterSrvHost = clusterSrvHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterHosts
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#hosts(List)
|
||||
*/
|
||||
public void setClusterHosts(ServerAddress[] clusterHosts) {
|
||||
this.clusterHosts = Arrays.asList(clusterHosts);
|
||||
}
|
||||
|
||||
/**
|
||||
* ????
|
||||
*
|
||||
* @param clusterConnectionMode
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode)
|
||||
*/
|
||||
public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) {
|
||||
this.clusterConnectionMode = clusterConnectionMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param custerRequiredClusterType
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType)
|
||||
*/
|
||||
public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) {
|
||||
this.custerRequiredClusterType = custerRequiredClusterType;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterRequiredReplicaSetName
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String)
|
||||
*/
|
||||
public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) {
|
||||
this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterLocalThresholdMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) {
|
||||
this.clusterLocalThresholdMS = clusterLocalThresholdMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterServerSelectionTimeoutMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) {
|
||||
this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS;
|
||||
}
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
/**
|
||||
* @param poolMaxSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int)
|
||||
*/
|
||||
public void setPoolMaxSize(int poolMaxSize) {
|
||||
this.poolMaxSize = poolMaxSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMinSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int)
|
||||
*/
|
||||
public void setPoolMinSize(int poolMinSize) {
|
||||
this.poolMinSize = poolMinSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxWaitTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) {
|
||||
this.poolMaxWaitTimeMS = poolMaxWaitTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionLifeTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) {
|
||||
this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionIdleTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) {
|
||||
this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceInitialDelayMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) {
|
||||
this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) {
|
||||
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS;
|
||||
}
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
/**
|
||||
* @param sslEnabled
|
||||
* @see com.mongodb.connection.SslSettings.Builder#enabled(boolean)
|
||||
*/
|
||||
public void setSslEnabled(Boolean sslEnabled) {
|
||||
this.sslEnabled = sslEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslInvalidHostNameAllowed
|
||||
* @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean)
|
||||
*/
|
||||
public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) {
|
||||
this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslProvider
|
||||
* @see com.mongodb.connection.SslSettings.Builder#context(SSLContext)
|
||||
* @see SSLContext#getInstance(String)
|
||||
*/
|
||||
public void setSslProvider(String sslProvider) {
|
||||
this.sslProvider = sslProvider;
|
||||
}
|
||||
|
||||
// encryption and retry
|
||||
|
||||
/**
|
||||
* @param applicationName
|
||||
* @see MongoClientSettings.Builder#applicationName(String)
|
||||
*/
|
||||
public void setApplicationName(@Nullable String applicationName) {
|
||||
this.applicationName = applicationName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryReads
|
||||
* @see MongoClientSettings.Builder#retryReads(boolean)
|
||||
*/
|
||||
public void setRetryReads(@Nullable Boolean retryReads) {
|
||||
this.retryReads = retryReads;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readConcern
|
||||
* @see MongoClientSettings.Builder#readConcern(ReadConcern)
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param writeConcern
|
||||
* @see MongoClientSettings.Builder#writeConcern(WriteConcern)
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryWrites
|
||||
* @see MongoClientSettings.Builder#retryWrites(boolean)
|
||||
*/
|
||||
public void setRetryWrites(@Nullable Boolean retryWrites) {
|
||||
this.retryWrites = retryWrites;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readPreference
|
||||
* @see MongoClientSettings.Builder#readPreference(ReadPreference)
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param streamFactoryFactory
|
||||
* @see MongoClientSettings.Builder#streamFactoryFactory(StreamFactoryFactory)
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param codecRegistry
|
||||
* @see MongoClientSettings.Builder#codecRegistry(CodecRegistry)
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uUidRepresentation can be {@literal null}.
* @see MongoClientSettings.Builder#uuidRepresentation(UuidRepresentation)
|
||||
*/
|
||||
public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) {
|
||||
this.uUidRepresentation = uUidRepresentation;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings)
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() {
|
||||
|
||||
Builder builder = MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.applicationName(applicationName) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.applyToClusterSettings((settings) -> {
|
||||
|
||||
settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
if (clusterConnectionMode != null) {
|
||||
settings.mode(clusterConnectionMode);
|
||||
}
|
||||
settings.requiredReplicaSetName(clusterRequiredReplicaSetName);
|
||||
|
||||
if (!CollectionUtils.isEmpty(clusterHosts)) {
|
||||
settings.hosts(clusterHosts);
|
||||
}
|
||||
settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(clusterMaxWaitQueueSize);
|
||||
settings.requiredClusterType(custerRequiredClusterType);
|
||||
|
||||
if (StringUtils.hasText(clusterSrvHost)) {
|
||||
settings.srvHost(clusterSrvHost);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings((settings) -> {
|
||||
|
||||
settings.minSize(poolMinSize);
|
||||
settings.maxSize(poolMaxSize);
|
||||
settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(poolMaxWaitQueueSize);
|
||||
settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToServerSettings((settings) -> {
|
||||
|
||||
settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToSocketSettings((settings) -> {
|
||||
|
||||
settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.receiveBufferSize(socketReceiveBufferSize);
|
||||
settings.sendBufferSize(socketSendBufferSize);
|
||||
}) //
|
||||
.applyToSslSettings((settings) -> {
|
||||
|
||||
settings.enabled(sslEnabled);
|
||||
if (sslEnabled) {
|
||||
|
||||
settings.invalidHostNameAllowed(sslInvalidHostNameAllowed);
|
||||
try {
|
||||
settings.context(
|
||||
StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault());
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (streamFactoryFactory != null) {
|
||||
builder = builder.streamFactoryFactory(streamFactoryFactory);
|
||||
}
|
||||
if (retryReads != null) {
|
||||
builder = builder.retryReads(retryReads);
|
||||
}
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
|
||||
if (uUidRepresentation != null) {
|
||||
builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
}
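For orientation, a minimal sketch of the builder chain that createInstance() assembles above, using only driver calls that appear in this listing; the application name, host, timeout and pool values are placeholders, not defaults taken from this code.

import java.util.Collections;
import java.util.concurrent.TimeUnit;

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class ClientSettingsSketch {

    static MongoClient createClient() {

        // Mirrors the applyTo* blocks used in createInstance(), with placeholder values.
        MongoClientSettings settings = MongoClientSettings.builder()
                .applicationName("sample-app")
                .applyToClusterSettings(cluster -> {
                    cluster.hosts(Collections.singletonList(new ServerAddress("localhost", 27017)));
                    cluster.serverSelectionTimeout(30, TimeUnit.SECONDS);
                })
                .applyToConnectionPoolSettings(pool -> pool.maxSize(50).minSize(5))
                .applyToSocketSettings(socket -> socket.connectTimeout(10, TimeUnit.SECONDS))
                .build();

        return MongoClients.create(settings);
    }
}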
@@ -16,10 +16,9 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteResult;
|
||||
import com.mongodb.WriteConcernResult;
|
||||
|
||||
/**
|
||||
* Mongo-specific {@link DataIntegrityViolationException}.
|
||||
@@ -30,18 +29,18 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
|
||||
private static final long serialVersionUID = -186980521176764046L;
|
||||
|
||||
private final WriteResult writeResult;
|
||||
private final WriteConcernResult writeResult;
|
||||
private final MongoActionOperation actionOperation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}.
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}.
|
||||
* @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}.
|
||||
* @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}.
|
||||
*/
|
||||
public MongoDataIntegrityViolationException(String message, WriteResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
|
||||
super(message);
|
||||
|
||||
@@ -53,11 +52,11 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link WriteResult} that caused the exception.
|
||||
* Returns the {@link WriteConcernResult} that caused the exception.
|
||||
*
|
||||
* @return the writeResult
|
||||
*/
|
||||
public WriteResult getWriteResult() {
|
||||
public WriteConcernResult getWriteResult() {
|
||||
return writeResult;
|
||||
}
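A hedged usage sketch of the changed API: the exception now exposes the driver's WriteConcernResult instead of the removed WriteResult. The template and entity arguments are assumed to be supplied by the caller; nothing else here is prescribed by this diff.

import org.springframework.data.mongodb.core.MongoDataIntegrityViolationException;
import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.WriteConcernResult;

class WriteResultCheckSketch {

    // template and entity come from the calling code; only the type returned by
    // getWriteResult() is specific to this change.
    static void saveAndReport(MongoTemplate template, Object entity) {
        try {
            template.save(entity);
        } catch (MongoDataIntegrityViolationException e) {
            WriteConcernResult result = e.getWriteResult(); // now typed as WriteConcernResult
            System.err.println("Write violated integrity constraints: " + result);
        }
    }
}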
@@ -0,0 +1,260 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 3.0
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
*/
|
||||
public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
* {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
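A minimal sketch of how a configured write concern flows through getMongoDatabase(), assuming the SimpleMongoClientDatabaseFactory subclass referenced above; the connection string and database name are placeholders.

import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

import com.mongodb.WriteConcern;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

class FactoryWriteConcernSketch {

    static MongoDatabase majorityDatabase() {

        SimpleMongoClientDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
                MongoClients.create("mongodb://localhost:27017"), "test");

        // getMongoDatabase() returns db.withWriteConcern(writeConcern) once a concern is set.
        factory.setWriteConcern(WriteConcern.MAJORITY);

        return factory.getMongoDatabase();
    }
}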
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDatabaseFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
}
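A sketch of the session-bound decoration implemented above; factory stands for any MongoDatabaseFactory, and the transaction body is omitted for brevity.

import org.springframework.data.mongodb.MongoDatabaseFactory;

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;

class SessionBoundFactorySketch {

    static void runInTransaction(MongoDatabaseFactory factory) {

        try (ClientSession session = factory.getSession(ClientSessionOptions.builder().build())) {

            // The returned factory proxies MongoDatabase/MongoCollection so the session
            // travels with every driver call made through it.
            MongoDatabaseFactory bound = factory.withSession(session);

            session.startTransaction();
            // ... use bound.getMongoDatabase() here; bound.isTransactionActive() is true ...
            session.commitTransaction();
        }
    }
}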
@@ -15,26 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
|
||||
* defining common properties such as database name and exception translator.
|
||||
* Common base class for usage with {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
@@ -42,22 +27,16 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 2.1
|
||||
* @see SimpleMongoDbFactory
|
||||
* @see SimpleMongoClientDbFactory
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
|
||||
*/
|
||||
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
@Deprecated
|
||||
public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
@@ -66,207 +45,6 @@ public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
*/
|
||||
protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return getDb(databaseName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDbFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return delegate.getLegacyDb();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -29,7 +29,6 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.MongoTransactionException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
@@ -37,7 +36,6 @@ import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.MongoServerException;
|
||||
@@ -112,10 +110,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
@@ -135,6 +129,7 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
|
||||
return new MongoTransactionException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
return new UncategorizedMongoDbException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
|
||||
@@ -48,7 +48,6 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -224,9 +223,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
|
||||
* {@link Cursor}.
|
||||
* {@link com.mongodb.client.FindIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
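A hedged example of consuming such a FindIterable-backed stream; the Person type and the "people" collection are placeholders, and closing the iterator is delegated to try-with-resources as the javadoc above requires.

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.util.CloseableIterator;

class StreamQuerySketch {

    static class Person {
        String id;
        String name;
    }

    static void streamActive(MongoOperations ops) {

        Query query = new Query(Criteria.where("active").is(true));

        // The iterator wraps the driver's FindIterable and must be closed.
        try (CloseableIterator<Person> it = ops.stream(query, Person.class, "people")) {
            it.forEachRemaining(person -> System.out.println(person.name));
        }
    }
}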
|
||||
@@ -239,9 +239,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
|
||||
* by a Mongo DB {@link Cursor}.
|
||||
* by a Mongo DB {@link com.mongodb.client.FindIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -360,7 +361,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
IndexOperations indexOps(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level.
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level.
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
@@ -518,11 +519,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of
|
||||
* the aggregation.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* <p>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
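A sketch of aggregateStream with the AggregateIterable-backed iterator described above; the Order type, its status field and the grouping are invented for illustration.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.util.CloseableIterator;

class AggregateStreamSketch {

    static class Order {
        String status;
    }

    static void countByStatus(MongoOperations ops) {

        TypedAggregation<Order> aggregation = newAggregation(Order.class,
                group("status").count().as("total"));

        // Wraps the driver's AggregateIterable, so the iterator has to be closed.
        try (CloseableIterator<Document> it = ops.aggregateStream(aggregation, Document.class)) {
            it.forEachRemaining(doc -> System.out.println(doc.toJson()));
        }
    }
}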
|
||||
@@ -537,11 +538,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
|
||||
* of the inputCollection is derived from the inputType of the aggregation.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -555,10 +556,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -574,10 +575,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -1373,10 +1374,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* <br />
|
||||
* support. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
|
||||
@@ -29,9 +29,7 @@ import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -54,24 +52,23 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionSynchronization;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
|
||||
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.QueryContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.UpdateContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
@@ -91,20 +88,12 @@ import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCre
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.*;
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupBy;
|
||||
import org.springframework.data.mongodb.core.mapreduce.GroupByResults;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Meta;
|
||||
@@ -114,6 +103,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
@@ -128,11 +118,6 @@ import org.springframework.util.ResourceUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.DBCollection;
|
||||
import com.mongodb.DBCursor;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
@@ -141,6 +126,7 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.DistinctIterable;
|
||||
import com.mongodb.client.FindIterable;
|
||||
import com.mongodb.client.MapReduceIterable;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoCursor;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
@@ -173,8 +159,8 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Andreas Zink
|
||||
* @author Cimon Lucas
|
||||
* @author Michael J. Simons
|
||||
* @author Roman Puchkovskiy
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
|
||||
@@ -193,7 +179,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
@@ -201,6 +187,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final SpelAwareProxyProjectionFactory projectionFactory;
|
||||
private final EntityOperations operations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
private final QueryOperations queryOperations;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE;
|
||||
@@ -213,18 +200,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION;
|
||||
|
||||
/**
|
||||
* Constructor used for a basic template configuration.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @deprecated since 2.2 in favor of {@link #MongoTemplate(com.mongodb.client.MongoClient, String)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MongoTemplate(MongoClient mongoClient, String databaseName) {
|
||||
this(new SimpleMongoDbFactory(mongoClient, databaseName), (MongoConverter) null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor used for a basic template configuration.
|
||||
*
|
||||
@@ -232,8 +207,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @since 2.1
|
||||
*/
|
||||
public MongoTemplate(com.mongodb.client.MongoClient mongoClient, String databaseName) {
|
||||
this(new SimpleMongoClientDbFactory(mongoClient, databaseName), (MongoConverter) null);
|
||||
public MongoTemplate(MongoClient mongoClient, String databaseName) {
|
||||
this(new SimpleMongoClientDatabaseFactory(mongoClient, databaseName), (MongoConverter) null);
|
||||
}
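A minimal bootstrap sketch for the constructor kept above; the connection string and database name are placeholders.

import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.client.MongoClients;

class TemplateBootstrapSketch {

    static MongoTemplate template() {
        // Internally wraps the client in a SimpleMongoClientDatabaseFactory, as shown above.
        return new MongoTemplate(MongoClients.create("mongodb://localhost:27017"), "test");
    }
}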
|
||||
|
||||
/**
|
||||
@@ -241,7 +216,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoTemplate(MongoDbFactory mongoDbFactory) {
|
||||
public MongoTemplate(MongoDatabaseFactory mongoDbFactory) {
|
||||
this(mongoDbFactory, (MongoConverter) null);
|
||||
}
|
||||
|
||||
@@ -251,7 +226,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mongoConverter
|
||||
*/
|
||||
public MongoTemplate(MongoDbFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) {
|
||||
public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
|
||||
@@ -264,6 +239,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.projectionFactory = new SpelAwareProxyProjectionFactory();
|
||||
this.operations = new EntityOperations(this.mongoConverter.getMappingContext());
|
||||
this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext());
|
||||
this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations,
|
||||
mongoDbFactory);
|
||||
|
||||
// We always have a mapping context in the converter, whether it's a simple one or not
|
||||
mappingContext = this.mongoConverter.getMappingContext();
|
||||
@@ -281,7 +258,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
}
|
||||
|
||||
private MongoTemplate(MongoDbFactory dbFactory, MongoTemplate that) {
|
||||
private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) {
|
||||
|
||||
this.mongoDbFactory = dbFactory;
|
||||
this.exceptionTranslator = that.exceptionTranslator;
|
||||
@@ -302,6 +279,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.mappingContext = that.mappingContext;
|
||||
this.operations = that.operations;
|
||||
this.propertyOperations = that.propertyOperations;
|
||||
this.queryOperations = that.queryOperations;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -316,8 +294,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern}
|
||||
* configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no
|
||||
* {@link WriteConcern} will be used.
|
||||
* configured on the {@link MongoDatabaseFactory} will apply.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
@@ -449,8 +426,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(entityType);
|
||||
|
||||
Document mappedFields = getMappedFieldsObject(query.getFieldsObject(), persistentEntity, returnType);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
|
||||
Document mappedQuery = queryContext.getMappedQuery(persistentEntity);
|
||||
Document mappedFields = queryContext.getMappedFields(persistentEntity, returnType, projectionFactory);
|
||||
|
||||
FindIterable<Document> cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection,
|
||||
col -> col.find(mappedQuery, Document.class).projection(mappedFields));
|
||||
@@ -524,8 +503,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* specification, must not be {@literal null}.
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
* @param documentCallbackHandler the handler that will extract results, one document at a time
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
* @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set,
|
||||
* (apply limits, skips and so on).
|
||||
*/
|
||||
protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler documentCallbackHandler,
|
||||
@Nullable CursorPreparer preparer) {
|
||||
@@ -845,10 +824,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
Assert.notNull(collectionName, "CollectionName must not be null!");
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass));
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity);
|
||||
|
||||
return execute(collectionName, new ExistsCallback(mappedQuery,
|
||||
operations.forType(entityClass).getCollation(query).map(Collation::toMongoCollation).orElse(null)));
|
||||
return execute(collectionName,
|
||||
new ExistsCallback(mappedQuery, queryContext.getCollation(entityClass).orElse(null)));
|
||||
}
|
||||
|
||||
// Find methods that take a Query to express the query and that return a List of objects.
|
||||
@@ -921,13 +901,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(resultClass, "ResultClass must not be null!");
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass != Object.class ? getPersistentEntity(entityClass) : null;
|
||||
DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field);
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity);
|
||||
String mappedFieldName = queryMapper.getMappedFields(new Document(field, 1), entity).keySet().iterator().next();
|
||||
|
||||
Class<T> mongoDriverCompatibleType = getMongoDbFactory().getCodecFor(resultClass) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
Document mappedQuery = distinctQueryContext.getMappedQuery(entity);
|
||||
String mappedFieldName = distinctQueryContext.getMappedFieldName(entity);
|
||||
Class<T> mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass);
|
||||
|
||||
MongoIterable<?> result = execute(collectionName, (collection) -> {
|
||||
|
||||
@@ -942,12 +920,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
DistinctIterable<T> iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType);
|
||||
distinctQueryContext.applyCollation(entityClass, iterable::collation);
|
||||
|
||||
return operations.forType(entityClass) //
|
||||
.getCollation(query) //
|
||||
.map(Collation::toMongoCollation) //
|
||||
.map(iterable::collation) //
|
||||
.orElse(iterable);
|
||||
return iterable;
|
||||
});
|
||||
|
||||
if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) {
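
The distinct branch above now delegates field mapping, driver-compatible type resolution and collation handling to DistinctQueryContext. A hedged usage sketch at the template level (Person, the active/lastname fields and the helper class name are made-up examples; template is assumed to be a configured MongoTemplate):

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;

class DistinctUsageSketch {

	static class Person {}

	// "lastname" is mapped via DistinctQueryContext.getMappedFieldName(entity); the result class
	// drives getDriverCompatibleClass(...) and, if needed, the post-read conversion shown above.
	List<String> distinctLastnames(MongoTemplate template) {
		return template.findDistinct(query(where("active").is(true)), "lastname", Person.class, String.class);
	}
}
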
@@ -956,7 +931,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
DefaultDbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory);
|
||||
|
||||
result = result.map((source) -> converter.mapValueToTargetType(source,
|
||||
getMostSpecificConversionTargetType(resultClass, entityClass, field), dbRefResolver));
|
||||
distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass), dbRefResolver));
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -966,32 +941,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param userType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @param field must not be {@literal null}.
|
||||
* @return the most specific conversion target type depending on user preference and domain type property.
|
||||
* @since 2.1
|
||||
*/
|
||||
private static Class<?> getMostSpecificConversionTargetType(Class<?> userType, Class<?> domainType, String field) {
|
||||
|
||||
Class<?> conversionTargetType = userType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(field, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor UserType over property one
|
||||
if (ClassUtils.isAssignable(userType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass) {
|
||||
return geoNear(near, entityClass, getCollectionName(entityClass));
|
||||
@@ -1106,10 +1055,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.isTrue(query.getSkip() <= 0, "Query must not define skip.");
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity);
|
||||
Document mappedFields = queryMapper.getMappedFields(query.getFieldsObject(), entity);
|
||||
Document mappedSort = queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory);
|
||||
Document mappedSort = queryContext.getMappedSort(entity);
|
||||
|
||||
replacement = maybeCallBeforeConvert(replacement, collectionName);
|
||||
Document mappedReplacement = operations.forEntity(replacement).toMappedDocument(this.mongoConverter).getDocument();
|
||||
@@ -1117,9 +1067,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName));
|
||||
maybeCallBeforeSave(replacement, mappedReplacement, collectionName);
|
||||
|
||||
return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
|
||||
operations.forType(entityType).getCollation(query).map(Collation::toMongoCollation).orElse(null), entityType,
|
||||
mappedReplacement, options, resultType);
|
||||
T saved = doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
|
||||
queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, resultType);
|
||||
|
||||
if (saved != null) {
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName));
|
||||
return maybeCallAfterSave(saved, mappedReplacement, collectionName);
|
||||
}
|
||||
|
||||
return saved;
|
||||
}
|
||||
|
||||
// Find methods that take a Query to express the query and that return a single object that is also removed from the
|
||||
@@ -1165,23 +1121,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
CountOptions options = new CountOptions();
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
CountContext countContext = queryOperations.countQueryContext(query);

if (query.getLimit() > 0) {
options.limit(query.getLimit());
}
if (query.getSkip() > 0) {
options.skip((int) query.getSkip());
}
if (StringUtils.hasText(query.getHint())) {
options.hint(Document.parse(query.getHint()));
}
CountOptions options = countContext.getCountOptions(entityClass);
Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity);

Document document = queryMapper.getMappedObject(query.getQueryObject(),
Optional.ofNullable(entityClass).map(it -> mappingContext.getPersistentEntity(entityClass)));

return doCount(collectionName, document, options);
return doCount(collectionName, mappedQuery, options);
}

@SuppressWarnings("ConstantConditions")
@@ -1224,7 +1169,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
protected void ensureNotIterable(@Nullable Object o) {
|
||||
if (null != o) {
|
||||
if (o != null) {
|
||||
if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
}
|
||||
@@ -1292,8 +1237,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
T saved = populateIdIfNecessary(initialized, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));
|
||||
|
||||
return saved;
|
||||
return maybeCallAfterSave(saved, dbDoc, collectionName);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -1386,8 +1330,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
if (i < ids.size()) {
|
||||
T saved = populateIdIfNecessary(obj, ids.get(i));
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, documentList.get(i), collectionName));
|
||||
savedObjects.add(saved);
|
||||
Document doc = documentList.get(i);
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, doc, collectionName));
|
||||
savedObjects.add(maybeCallAfterSave(saved, doc, collectionName));
|
||||
} else {
|
||||
savedObjects.add(obj);
|
||||
}
|
||||
@@ -1457,7 +1402,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName));
|
||||
|
||||
return toSave;
|
||||
return maybeCallAfterSave(toSave, mapped.getDocument(), collectionName);
|
||||
}
|
||||
|
||||
protected <T> T doSave(String collectionName, T objectToSave, MongoWriter<T> writer) {
|
||||
@@ -1478,7 +1423,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
T saved = populateIdIfNecessary(objectToSave, id);
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName));
|
||||
|
||||
return saved;
|
||||
return maybeCallAfterSave(saved, dbDoc, collectionName);
|
||||
}
|
||||
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@@ -1538,23 +1483,38 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass,
|
||||
dbDoc, null);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
MappedDocument mapped = MappedDocument.of(dbDoc);
|
||||
|
||||
MongoCollection<Document> collectionToUse = writeConcernToUse == null //
|
||||
? collection //
|
||||
: collection.withWriteConcern(writeConcernToUse);
|
||||
|
||||
if (!mapped.hasId()) {
|
||||
if (writeConcernToUse == null) {
|
||||
collection.insertOne(dbDoc);
|
||||
} else {
|
||||
collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc);
|
||||
}
|
||||
} else if (writeConcernToUse == null) {
|
||||
collection.replaceOne(mapped.getIdFilter(), dbDoc, new ReplaceOptions().upsert(true));
|
||||
collectionToUse.insertOne(dbDoc);
|
||||
} else {
|
||||
collection.withWriteConcern(writeConcernToUse).replaceOne(mapped.getIdFilter(), dbDoc,
|
||||
new ReplaceOptions().upsert(true));
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true);
Document replacement = updateContext.getMappedUpdate(entity);

Document filter = updateContext.getMappedQuery(entity);

if (updateContext.requiresShardKey(filter, entity)) {

if (entity.getShardKey().isImmutable()) {
filter = updateContext.applyShardKey(entity, filter, null);
} else {
filter = updateContext.applyShardKey(entity, filter,
collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first());
}
}

collectionToUse.replaceOne(filter, replacement, new ReplaceOptions().upsert(true));
}
return mapped.getId();
});
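
Saving into a sharded collection now routes through replaceOne with a filter that must carry the full shard key. A minimal sketch of the completion step performed by UpdateContext.applyShardKey, assuming the shard key values can be read from the replacement document (the helper and class names are illustrative):

import java.util.Set;

import org.bson.Document;

class ShardKeyFilterSketch {

	// copy every shard key field missing from the id filter out of the replacement document,
	// mirroring the immutable-shard-key branch above (the mutable branch reads the existing document instead)
	static Document completeShardKey(Document idFilter, Document replacement, Set<String> shardKeyFields) {

		Document filterWithShardKey = new Document(idFilter);
		shardKeyFields.forEach(key -> filterWithShardKey.putIfAbsent(key, replacement.get(key)));
		return filterWithShardKey;
	}
}
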
@@ -1629,33 +1589,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
|
||||
UpdateOptions opts = new UpdateOptions();
|
||||
opts.upsert(upsert);
|
||||
UpdateContext updateContext = multi ? queryOperations.updateContext(update, query, upsert)
|
||||
: queryOperations.updateSingleContext(update, query, upsert);
|
||||
updateContext.increaseVersionForUpdateIfNecessary(entity);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
opts.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
|
||||
}
|
||||
Document queryObj = updateContext.getMappedQuery(entity);
|
||||
UpdateOptions opts = updateContext.getUpdateOptions(entityClass);
|
||||
|
||||
Document queryObj = new Document();
|
||||
if (updateContext.isAggregationUpdate()) {
|
||||
|
||||
if (query != null) {
|
||||
queryObj.putAll(queryMapper.getMappedObject(query.getQueryObject(), entity));
|
||||
}
|
||||
|
||||
if (multi && update.isIsolated() && !queryObj.containsKey("$isolated")) {
|
||||
queryObj.put("$isolated", 1);
|
||||
}
|
||||
|
||||
if (update instanceof AggregationUpdate) {
|
||||
|
||||
AggregationOperationContext context = entityClass != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(entityClass, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
List<Document> pipeline = new AggregationUtil(queryMapper, mappingContext)
|
||||
.createPipeline((AggregationUpdate) update, context);
|
||||
List<Document> pipeline = updateContext.getUpdatePipeline(entityClass);
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass,
|
||||
update.getUpdateObject(), queryObj);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
@@ -1664,44 +1611,42 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName);
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName,
|
||||
entityClass, update.getUpdateObject(), queryObj);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection;
|
||||
|
||||
return multi ? collection.updateMany(queryObj, pipeline, opts) : collection.updateOne(queryObj, pipeline, opts);
|
||||
});
|
||||
}
|
||||
|
||||
Document updateObj = updateContext.getMappedUpdate(entity);
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass,
|
||||
updateObj, queryObj);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
operations.forType(entityClass) //
|
||||
.getCollation(query) //
|
||||
.map(Collation::toMongoCollation) //
|
||||
.ifPresent(opts::collation);
|
||||
|
||||
Document updateObj = update instanceof MappedUpdate ? update.getUpdateObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", serializeToJsonSafely(queryObj),
|
||||
serializeToJsonSafely(updateObj), collectionName);
|
||||
}
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass,
|
||||
updateObj, queryObj);
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection;
|
||||
|
||||
if (!UpdateMapper.isUpdateObject(updateObj)) {
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.collation(opts.getCollation());
|
||||
replaceOptions.upsert(opts.isUpsert());
|
||||
Document filter = new Document(queryObj);
|
||||
|
||||
return collection.replaceOne(queryObj, updateObj, replaceOptions);
|
||||
if (updateContext.requiresShardKey(filter, entity)) {
|
||||
|
||||
if (entity.getShardKey().isImmutable()) {
|
||||
filter = updateContext.applyShardKey(entity, filter, null);
|
||||
} else {
|
||||
filter = updateContext.applyShardKey(entity, filter,
|
||||
collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first());
|
||||
}
|
||||
}
|
||||
|
||||
ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass);
|
||||
return collection.replaceOne(filter, updateObj, replaceOptions);
|
||||
} else {
|
||||
return multi ? collection.updateMany(queryObj, updateObj, opts)
|
||||
: collection.updateOne(queryObj, updateObj, opts);
|
||||
@@ -1709,17 +1654,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
});
|
||||
}
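
Within doUpdate the mapped update is sent through updateOne/updateMany only when it is a genuine update document; otherwise it is treated as a full replacement and goes through replaceOne. A rough standalone approximation of that shape check (UpdateMapper.isUpdateObject is the authoritative implementation; this helper is illustrative only):

import org.bson.Document;

class UpdateShapeSketch {

	// true when every top-level key is an update operator such as $set, $inc or $unset
	static boolean looksLikeUpdateDocument(Document mappedUpdate) {
		return !mappedUpdate.isEmpty() && mappedUpdate.keySet().stream().allMatch(key -> key.startsWith("$"));
	}
}
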
private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity,
|
||||
UpdateDefinition update) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DeleteResult remove(Object object) {
|
||||
|
||||
@@ -1764,7 +1698,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
|
||||
MongoPersistentEntity<?> entity = getPersistentEntity(entityClass);
|
||||
Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), entity);
|
||||
|
||||
DeleteContext deleteContext = multi ? queryOperations.deleteQueryContext(query)
|
||||
: queryOperations.deleteSingleContext(query);
|
||||
Document queryObject = deleteContext.getMappedQuery(entity);
|
||||
DeleteOptions options = deleteContext.getDeleteOptions(entityClass);
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass,
|
||||
null, queryObject);
|
||||
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
@@ -1772,18 +1715,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
Document removeQuery = queryObject;
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
|
||||
operations.forType(entityClass) //
|
||||
.getCollation(query) //
|
||||
.map(Collation::toMongoCollation) //
|
||||
.ifPresent(options::collation);
|
||||
|
||||
MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass,
|
||||
null, queryObject);
|
||||
|
||||
WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.",
|
||||
new Object[] { serializeToJsonSafely(removeQuery), collectionName });
|
||||
@@ -1892,7 +1823,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
if (query.getMeta().getMaxTimeMsec() != null) {
|
||||
mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS);
|
||||
}
|
||||
mapReduce = mapReduce.sort(getMappedSortObject(query, domainType));
|
||||
|
||||
Document mappedSort = getMappedSortObject(query, domainType);
|
||||
if (mappedSort != null && !mappedSort.isEmpty()) {
|
||||
mapReduce = mapReduce.sort(getMappedSortObject(query, domainType));
|
||||
}
|
||||
|
||||
mapReduce = mapReduce
|
||||
.filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType)));
|
||||
@@ -2378,33 +2313,49 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
protected <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (null != eventPublisher) {
|
||||
if (eventPublisher != null) {
|
||||
eventPublisher.publishEvent(event);
|
||||
}
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <T> T maybeCallBeforeConvert(T object, String collection) {
|
||||
|
||||
if (null != entityCallbacks) {
|
||||
if (entityCallbacks != null) {
|
||||
return entityCallbacks.callback(BeforeConvertCallback.class, object, collection);
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected <T> T maybeCallBeforeSave(T object, Document document, String collection) {
|
||||
|
||||
if (null != entityCallbacks) {
|
||||
if (entityCallbacks != null) {
|
||||
return entityCallbacks.callback(BeforeSaveCallback.class, object, document, collection);
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
protected <T> T maybeCallAfterSave(T object, Document document, String collection) {

if (entityCallbacks != null) {
return entityCallbacks.callback(AfterSaveCallback.class, object, document, collection);
}

return object;
}

protected <T> T maybeCallAfterConvert(T object, Document document, String collection) {

if (entityCallbacks != null) {
return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection);
}

return object;
}
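
The new maybeCallAfterSave/maybeCallAfterConvert hooks dispatch to EntityCallbacks, so an application can contribute callback beans that may even replace the entity instance. A hedged registration sketch, assuming AfterSaveCallback exposes onAfterSave(entity, document, collection) as in the mapping.event package (Person and the configuration class are made up):

import org.bson.Document;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;

@Configuration
class EntityCallbackConfig {

	static class Person {}

	// picked up through EntityCallbacks and invoked by maybeCallAfterSave(...);
	// the returned instance replaces the entity handed back to the caller
	@Bean
	AfterSaveCallback<Person> afterSaveCallback() {
		return (Person entity, Document document, String collection) -> {
			// e.g. reset dirty-tracking state once the document has been written
			return entity;
		};
	}
}
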
/**
|
||||
* Create the specified collection using the provided options
|
||||
*
|
||||
@@ -2491,8 +2442,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Class<T> entityClass) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
Document mappedFields = queryMapper.getMappedObject(fields, entity);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query),
|
||||
@@ -2527,8 +2480,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param query the query document that specifies the criteria used to find a record.
|
||||
* @param fields the document that specifies the fields to be returned.
|
||||
* @param entityClass the parameterized type of the returned list.
|
||||
* @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply
|
||||
* limits, skips and so on).
|
||||
* @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set,
|
||||
* (apply limits, skips and so on).
|
||||
* @return the {@link List} of converted objects.
|
||||
*/
|
||||
protected <T> List<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass,
|
||||
@@ -2542,8 +2495,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
Document mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
@@ -2565,8 +2519,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
|
||||
|
||||
Document mappedFields = getMappedFieldsObject(fields, entity, targetClass);
|
||||
Document mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
@@ -2682,21 +2637,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
increaseVersionForUpdateIfNecessary(entity, update);
|
||||
UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false);
|
||||
updateContext.increaseVersionForUpdateIfNecessary(entity);
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query, entity);
|
||||
|
||||
Object mappedUpdate;
|
||||
if (update instanceof AggregationUpdate) {
|
||||
|
||||
AggregationOperationContext context = entityClass != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(entityClass, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
mappedUpdate = new AggregationUtil(queryMapper, mappingContext).createPipeline((Aggregation) update, context);
|
||||
} else {
|
||||
mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
}
|
||||
Document mappedQuery = updateContext.getMappedQuery(entity);
|
||||
Object mappedUpdate = updateContext.isAggregationUpdate() ? updateContext.getUpdatePipeline(entityClass)
|
||||
: updateContext.getMappedUpdate(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
@@ -2797,16 +2743,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callback to do queries against the datastore that requires reading a collection of objects.
|
||||
* It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link DBCursor}.</li>
|
||||
* <li>Prepare that {@link DBCursor} with the given {@link CursorPreparer} (will be skipped if {@link CursorPreparer}
|
||||
* is {@literal null}</li>
|
||||
* <li>Iterate over the {@link DBCursor} and applies the given {@link DocumentCallback} to each of the
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
|
||||
* {@link CursorPreparer} is {@literal null}</li>
|
||||
* <li>Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
|
||||
* {@link Document}s collecting the actual result {@link List}.</li>
|
||||
* <ol>
|
||||
*
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link DBCursor} with
|
||||
* @param preparer the {@link CursorPreparer} to potentially modify the {@link DBCursor} before iterating over it
|
||||
* @param collectionCallback the callback to retrieve the {@link FindIterable} with
|
||||
* @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
@@ -2878,7 +2824,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return type != null ? mappingContext.getPersistentEntity(type) : null;
|
||||
}
|
||||
|
||||
private static MongoConverter getDefaultMongoConverter(MongoDbFactory factory) {
|
||||
private static MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory);
|
||||
MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList());
|
||||
@@ -2904,23 +2850,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity<?> entity,
|
||||
Class<?> targetType) {
|
||||
|
||||
if (entity == null) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
return queryMapper.getMappedFields(projectedFields, entity);
|
||||
}
|
||||
|
||||
return queryMapper.getMappedFields(projectedFields, mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to convert the given {@link RuntimeException} into a {@link DataAccessException} but returns the original
|
||||
* exception if the conversion failed. This allows safe re-throwing of the return value.
|
||||
@@ -2939,7 +2868,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification
|
||||
* {@link Document} and executes that against the {@link DBCollection}.
|
||||
* {@link Document} and executes that against the {@link MongoCollection}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Risberg
|
||||
@@ -2980,7 +2909,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification
|
||||
* {@link Document} and executes that against the {@link DBCollection}.
|
||||
* {@link Document} and executes that against the {@link MongoCollection}.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Risberg
|
||||
@@ -3037,7 +2966,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification
|
||||
* {@link Document} and executes that against the {@link DBCollection}.
|
||||
* {@link Document} and executes that against the {@link MongoCollection}.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
*/
|
||||
@@ -3182,6 +3111,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Roman Puchkovskiy
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
private class ReadDocumentCallback<T> implements DocumentCallback<T> {
|
||||
@@ -3191,16 +3121,18 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final String collectionName;
|
||||
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document object) {
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(object, type, collectionName));
|
||||
T source = null;
|
||||
|
||||
if (document != null) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
source = reader.read(type, document);
|
||||
}
|
||||
|
||||
T source = reader.read(type, object);
|
||||
|
||||
if (null != source) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(object, source, collectionName));
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
source = maybeCallAfterConvert(source, document, collectionName);
|
||||
}
|
||||
|
||||
return source;
|
||||
@@ -3229,24 +3161,23 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document object) {
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
if (object == null) {
|
||||
if (document == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType
|
||||
: targetType;
|
||||
|
||||
if (null != object) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(object, targetType, collectionName));
|
||||
}
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));
|
||||
|
||||
Object source = reader.read(typeToRead, object);
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
|
||||
if (null != result) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName));
|
||||
if (result != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
result = maybeCallAfterConvert(result, document, collectionName);
|
||||
}
|
||||
|
||||
return (T) result;
|
||||
@@ -3295,7 +3226,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(query.getHint())) {
cursorToUse = cursorToUse.hint(Document.parse(query.getHint()));

String hint = query.getHint();

if (BsonUtils.isJsonDocument(hint)) {
cursorToUse = cursorToUse.hint(BsonUtils.parse(hint, mongoDbFactory));
} else {
cursorToUse = cursorToUse.hintString(hint);
}
}
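
A hint can now be given either as a JSON document, which is parsed via BsonUtils and applied through FindIterable#hint, or as a plain index name, which is applied through FindIterable#hintString. A short usage sketch (the index name lastname_idx is made up):

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.query.Query;

class HintUsageSketch {

	// JSON shaped hint: parsed into a Document and applied via FindIterable#hint
	static Query jsonHint() {
		return query(where("lastname").is("Matthews")).withHint("{ lastname: 1 }");
	}

	// plain index name: no longer parsed, applied via FindIterable#hintString
	static Query indexNameHint() {
		return query(where("lastname").is("Matthews")).withHint("lastname_idx");
	}
}
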
if (meta.hasValues()) {
|
||||
@@ -3304,14 +3242,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
cursorToUse = cursorToUse.comment(meta.getComment());
|
||||
}
|
||||
|
||||
if (meta.getSnapshot()) {
|
||||
cursorToUse = cursorToUse.snapshot(meta.getSnapshot());
|
||||
}
|
||||
|
||||
if (meta.getMaxScan() != null) {
|
||||
cursorToUse = cursorToUse.maxScan(meta.getMaxScan());
|
||||
}
|
||||
|
||||
if (meta.getMaxTimeMsec() != null) {
|
||||
cursorToUse = cursorToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS);
|
||||
}
|
||||
@@ -3396,7 +3326,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link CloseableIterator} that is backed by a MongoDB {@link Cursor}.
|
||||
* A {@link CloseableIterator} that is backed by a MongoDB {@link MongoCollection}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @since 1.7
|
||||
@@ -3409,7 +3339,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private DocumentCallback<T> objectReadCallback;
|
||||
|
||||
/**
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link Cursor}.
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}.
|
||||
*
|
||||
* @param cursor
|
||||
* @param exceptionTranslator
|
||||
@@ -3476,7 +3406,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
}
|
||||
|
||||
public MongoDbFactory getMongoDbFactory() {
public MongoDatabaseFactory getMongoDbFactory() {
return mongoDbFactory;
}
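
The new QueryOperations class added below centralizes the query, field and sort mapping plus the option derivation that the template methods above used to perform inline. A hedged end-to-end sketch of a call that exercises those paths (template is assumed to be a fully configured MongoTemplate, Person a mapped domain type, lastname_idx a made-up index name):

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;

class QueryOperationsUsageSketch {

	static class Person {}

	long countGermanMatthews(MongoTemplate template) {

		Query query = query(where("lastname").is("Matthews")) //
				.collation(Collation.of("de")) //
				.withHint("lastname_idx");

		// collation, hint and paging are turned into CountOptions by QueryOperations.CountContext
		return template.count(query, Person.class);
	}
}
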
@@ -0,0 +1,763 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
|
||||
/**
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
* for {@literal count}, {@literal remove}, and other methods.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 3.0
*/
class QueryOperations {

private final QueryMapper queryMapper;
private final UpdateMapper updateMapper;
private final EntityOperations entityOperations;
private final PropertyOperations propertyOperations;
private final CodecRegistryProvider codecRegistryProvider;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final AggregationUtil aggregationUtil;
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);

/**
|
||||
* Create a new instance of {@link QueryOperations}.
|
||||
*
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param updateMapper must not be {@literal null}.
|
||||
* @param entityOperations must not be {@literal null}.
|
||||
* @param propertyOperations must not be {@literal null}.
|
||||
* @param codecRegistryProvider must not be {@literal null}.
|
||||
*/
|
||||
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
|
||||
PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.entityOperations = entityOperations;
|
||||
this.propertyOperations = propertyOperations;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.mappingContext = queryMapper.getMappingContext();
|
||||
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
QueryContext createQueryContext(Query query) {
|
||||
return new QueryContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DistinctQueryContext}.
|
||||
*/
|
||||
DistinctQueryContext distinctQueryContext(Query query, String fieldName) {
|
||||
return new DistinctQueryContext(query, fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link CountContext}.
|
||||
*/
|
||||
CountContext countQueryContext(Query query) {
|
||||
return new CountContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting multiple documents.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, true, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param replacement the {@link MappedDocument mapped replacement} document.
|
||||
* @param upsert use {@literal true} to insert a new document when no existing document is found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
|
||||
return new UpdateContext(replacement, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing all matching documents.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DeleteContext}.
|
||||
*/
|
||||
DeleteContext deleteQueryContext(Query query) {
|
||||
return new DeleteContext(query, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing only the first matching document.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DeleteContext}.
|
||||
*/
|
||||
DeleteContext deleteSingleContext(Query query) {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class QueryContext {
|
||||
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
* plain {@link Document}).
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
private QueryContext(@Nullable Query query) {
|
||||
this.query = query != null ? query : new Query();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Query getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document getQueryObject() {
|
||||
return query.getQueryObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the
|
||||
* given {@literal domainType}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable Class<T> domainType,
|
||||
Function<Class<T>, MongoPersistentEntity<?>> entityLookup) {
|
||||
return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> entity) {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
|
||||
} else {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields,
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getSortObject() sort} option.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present, or fall back to
* the {@literal domain type's} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param consumer must not be {@literal null}.
|
||||
*/
|
||||
void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
|
||||
getCollation(domainType).ifPresent(consumer::accept);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present, or fall back to
* the {@literal domain type's} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
|
||||
|
||||
return entityOperations.forType(domainType).getCollation(query) //
|
||||
.map(Collation::toMongoCollation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DistinctQueryContext extends QueryContext {
|
||||
|
||||
private final String fieldName;
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param fieldName must not be {@literal null}.
|
||||
*/
|
||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||
|
||||
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapped field name to project to.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return getMappedFields(entity).keySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the MongoDB native representation of the given {@literal type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
<T> Class<T> getDriverCompatibleClass(Class<T> type) {
|
||||
|
||||
return codecRegistryProvider.getCodecFor(type) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the most specific read target type based on the user {@literal requestedTargetType} and the property type
* based on meta information extracted from the {@literal domainType}.
|
||||
*
|
||||
* @param requestedTargetType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
|
||||
|
||||
Class<?> conversionTargetType = requestedTargetType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor UserType over property one
|
||||
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class CountContext extends QueryContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
CountContext(@Nullable Query query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType) {
|
||||
return getCountOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
|
||||
|
||||
CountOptions options = new CountOptions();
|
||||
Query query = getQuery();
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
|
||||
String hint = query.getHint();
|
||||
if (BsonUtils.isJsonDocument(hint)) {
|
||||
options.hint(BsonUtils.parse(hint, codecRegistryProvider));
|
||||
} else {
|
||||
options.hintString(hint);
|
||||
}
|
||||
}
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DeleteContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
|
||||
*/
|
||||
DeleteContext(@Nullable Query query, boolean multi) {
|
||||
|
||||
super(query);
|
||||
this.multi = multi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
|
||||
return getDeleteOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents shall be deleted.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
|
||||
*/
|
||||
class UpdateContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
private final boolean upsert;
|
||||
private final @Nullable UpdateDefinition update;
|
||||
private final @Nullable MappedDocument mappedDocument;
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
|
||||
this(update, new BasicQuery(query), multi, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) {
|
||||
|
||||
super(query);
|
||||
|
||||
this.multi = multi;
|
||||
this.upsert = upsert;
|
||||
this.update = update;
|
||||
this.mappedDocument = null;
|
||||
}
|
||||
|
||||
UpdateContext(MappedDocument update, boolean upsert) {
|
||||
|
||||
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
|
||||
this.multi = false;
|
||||
this.upsert = upsert;
|
||||
this.mappedDocument = update;
|
||||
this.update = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType) {
|
||||
return getUpdateOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType, @Nullable Consumer<UpdateOptions> callback) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update != null && update.hasArrayFilters()) {
|
||||
options
|
||||
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType) {
|
||||
return getReplaceOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType, @Nullable Consumer<ReplaceOptions> callback) {
|
||||
|
||||
UpdateOptions updateOptions = getUpdateOptions(domainType);
|
||||
|
||||
ReplaceOptions options = new ReplaceOptions();
|
||||
options.collation(updateOptions.getCollation());
|
||||
options.upsert(updateOptions.isUpsert());
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> domainType) {
|
||||
|
||||
Document mappedQuery = super.getMappedQuery(domainType);
|
||||
|
||||
if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
|
||||
mappedQuery.put("$isolated", 1);
|
||||
}
|
||||
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {
|
||||
|
||||
Document shardKeySource = existing != null ? existing
|
||||
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
|
||||
|
||||
Document filterWithShardKey = new Document(filter);
|
||||
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
|
||||
|
||||
return filterWithShardKey;
|
||||
}
|
||||
|
||||
boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {
|
||||
|
||||
return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
|
||||
&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entities
|
||||
* {@literal id} property.
|
||||
* @since 3.0
|
||||
*/
|
||||
private boolean shardedById(MongoPersistentEntity<?> domainType) {
|
||||
|
||||
ShardKey shardKey = domainType.getShardKey();
|
||||
if (shardKey.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String key = shardKey.getPropertyNames().iterator().next();
|
||||
if ("_id".equals(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = domainType.getIdProperty();
|
||||
return idProperty != null && idProperty.getName().equals(key);
|
||||
}
|
||||
|
||||
Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
|
||||
return getMappedShardKey(entity).keySet();
|
||||
}
|
||||
|
||||
Document getMappedShardKey(MongoPersistentEntity<?> entity) {
|
||||
return mappedShardKey.computeIfAbsent(entity.getType(),
|
||||
key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
|
||||
|
||||
AggregationOperationContext context = domainType != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped update {@link Document}.
|
||||
*
|
||||
* @param entity
|
||||
* @return
|
||||
*/
|
||||
Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (update != null) {
|
||||
return update instanceof MappedUpdate ? update.getUpdateObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
}
|
||||
return mappedDocument.getDocument();
|
||||
}
|
||||
|
||||
/**
|
||||
* Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not
|
||||
* already done in the actual {@link UpdateDefinition}
|
||||
*
|
||||
* @param persistentEntity can be {@literal null}.
|
||||
*/
|
||||
void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
|
||||
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the update holds an aggregation pipeline.
|
||||
*/
|
||||
boolean isAggregationUpdate() {
|
||||
return update instanceof AggregationUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents should be updated.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
@@ -38,13 +39,14 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .matching(where("firstname").is("luke"))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Juergen Zimmermann
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFindOperation {
|
||||
@@ -144,6 +146,18 @@ public interface ReactiveFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
	/**
	 * Set the filter {@link CriteriaDefinition criteria} to be used.
	 *
	 * @param criteria must not be {@literal null}.
	 * @return new instance of {@link TerminatingFind}.
	 * @throws IllegalArgumentException if criteria is {@literal null}.
	 * @since 3.0
	 */
	default TerminatingFind<T> matching(CriteriaDefinition criteria) {
		return matching(Query.query(criteria));
	}
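A minimal usage sketch for the criteria shortcut above (not part of the change set): it assumes an existing ReactiveMongoTemplate and a placeholder Jedi document class, and simply saves the Query.query(...) wrapping that was required before.

import static org.springframework.data.mongodb.core.query.Criteria.where;

import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

class FindByCriteriaSketch {

	// Placeholder domain type, only here to keep the sketch self-contained.
	static class Jedi {
		String firstname;
	}

	Flux<Jedi> findLukes(ReactiveMongoTemplate template) {

		// matching(CriteriaDefinition) wraps the criteria into Query.query(criteria) internally.
		return template.query(Jedi.class) //
				.matching(where("firstname").is("luke")) //
				.all();
	}
}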
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
@@ -259,9 +273,21 @@ public interface ReactiveFindOperation {
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -146,6 +147,18 @@ public interface ReactiveMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -22,7 +22,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
|
||||
@@ -15,192 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @deprecated since 3.0 - Use {@link MongoClientSettingsFactoryBean} instead.
|
||||
*/
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
@Deprecated
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends MongoClientSettingsFactoryBean {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private List<MongoCredential> credentialList = new ArrayList<>();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
|
||||
private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
|
||||
private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
|
||||
private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
|
||||
private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
|
||||
private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern}.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadConcern}.
|
||||
*
|
||||
* @param readConcern
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the List of {@link MongoCredential}s.
|
||||
*
|
||||
* @param credentialList must not be {@literal null}.
|
||||
*/
|
||||
public void setCredentialList(List<MongoCredential> credentialList) {
|
||||
|
||||
Assert.notNull(credentialList, "CredendialList must not be null!");
|
||||
|
||||
this.credentialList.addAll(credentialList);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the {@link MongoCredential} to the list of credentials.
|
||||
*
|
||||
* @param mongoCredential must not be {@literal null}.
|
||||
*/
|
||||
public void addMongoCredential(MongoCredential mongoCredential) {
|
||||
|
||||
Assert.notNull(mongoCredential, "MongoCredential must not be null!");
|
||||
|
||||
this.credentialList.add(mongoCredential);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link StreamFactoryFactory}.
|
||||
*
|
||||
* @param streamFactoryFactory
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link CodecRegistry}.
|
||||
*
|
||||
* @param codecRegistry
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClusterSettings}.
|
||||
*
|
||||
* @param clusterSettings
|
||||
*/
|
||||
public void setClusterSettings(ClusterSettings clusterSettings) {
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SocketSettings}.
|
||||
*
|
||||
* @param socketSettings
|
||||
*/
|
||||
public void setSocketSettings(SocketSettings socketSettings) {
|
||||
this.socketSettings = socketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the heartbeat {@link SocketSettings}.
|
||||
*
|
||||
* @param heartbeatSocketSettings
|
||||
*/
|
||||
public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
|
||||
this.heartbeatSocketSettings = heartbeatSocketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ConnectionPoolSettings}.
|
||||
*
|
||||
* @param connectionPoolSettings
|
||||
*/
|
||||
public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
|
||||
this.connectionPoolSettings = connectionPoolSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ServerSettings}.
|
||||
*
|
||||
* @param serverSettings
|
||||
*/
|
||||
public void setServerSettings(ServerSettings serverSettings) {
|
||||
this.serverSettings = serverSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SslSettings}.
|
||||
*
|
||||
* @param sslSettings
|
||||
*/
|
||||
public void setSslSettings(SslSettings sslSettings) {
|
||||
this.sslSettings = sslSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() throws Exception {
|
||||
|
||||
return MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.credentialList(credentialList) //
|
||||
.streamFactoryFactory(streamFactoryFactory) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.clusterSettings(clusterSettings) //
|
||||
.socketSettings(socketSettings) //
|
||||
.heartbeatSocketSettings(heartbeatSocketSettings) //
|
||||
.connectionPoolSettings(connectionPoolSettings) //
|
||||
.serverSettings(serverSettings) //
|
||||
.sslSettings(sslSettings) //
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -66,6 +66,7 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
* @see Flux
|
||||
* @see Mono
|
||||
@@ -298,7 +299,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param collectionName name of the collection.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
Mono<MongoCollection<Document>> getCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
|
||||
File diff suppressed because it is too large
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
@@ -106,6 +107,18 @@ public interface ReactiveRemoveOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemove<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemove}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
interface ReactiveRemove<T> extends RemoveWithCollection<T> {}
|
||||
|
||||
@@ -15,12 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
|
||||
@@ -171,6 +172,18 @@ public interface ReactiveUpdateOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateWithUpdate<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateWithUpdate}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,7 +21,6 @@ import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DB;
|
||||
|
||||
/**
|
||||
* Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -18,66 +18,63 @@ package org.springframework.data.mongodb.core;
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author George Moraitis
|
||||
* @author Mark Paluch
|
||||
* @deprecated since 2.2 in favor of {@link SimpleMongoClientDbFactory}.
|
||||
* @since 3.0
|
||||
*/
|
||||
@Deprecated
|
||||
public class SimpleMongoDbFactory extends MongoDbFactorySupport<MongoClient> implements DisposableBean {
|
||||
public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySupport<MongoClient>
|
||||
implements DisposableBean {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClientURI}.
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance for the given {@code connectionString}.
|
||||
*
|
||||
* @param uri coordinates for a database connection. Must contain a database name and must not be {@literal null}.
|
||||
* @since 1.7
|
||||
* @param connectionString connection coordinates for a database connection. Must contain a database name and must not
|
||||
* be {@literal null} or empty.
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/connection-string/">MongoDB Connection String reference</a>
|
||||
*/
|
||||
public SimpleMongoDbFactory(MongoClientURI uri) {
|
||||
this(new MongoClient(uri), uri.getDatabase(), true);
|
||||
public SimpleMongoClientDatabaseFactory(String connectionString) {
|
||||
this(new ConnectionString(connectionString));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClient}.
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param connectionString connection coordinates for a database connection. Must also contain a database name and must not
|
||||
* be {@literal null}.
|
||||
*/
|
||||
public SimpleMongoClientDatabaseFactory(ConnectionString connectionString) {
|
||||
this(MongoClients.create(connectionString), connectionString.getDatabase(), true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @since 1.7
|
||||
*/
|
||||
public SimpleMongoDbFactory(MongoClient mongoClient, String databaseName) {
|
||||
public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName) {
|
||||
this(mongoClient, databaseName, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param mongoClient
|
||||
* @param databaseName
|
||||
* Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated
|
||||
* @since 1.7
|
||||
*/
|
||||
private SimpleMongoDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return getMongoClient().getDB(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
@@ -87,7 +84,7 @@ public class SimpleMongoDbFactory extends MongoDbFactorySupport<MongoClient> imp
|
||||
return getMongoClient().startSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient()
|
||||
*/
|
||||
@@ -96,7 +93,7 @@ public class SimpleMongoDbFactory extends MongoDbFactorySupport<MongoClient> imp
|
||||
getMongoClient().close();
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@@ -15,12 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
@@ -30,8 +25,10 @@ import com.mongodb.client.MongoDatabase;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @deprecated since 3.0, use {@link SimpleMongoClientDatabaseFactory} instead.
|
||||
*/
|
||||
public class SimpleMongoClientDbFactory extends MongoDbFactorySupport<MongoClient> implements DisposableBean {
|
||||
@Deprecated
|
||||
public class SimpleMongoClientDbFactory extends SimpleMongoClientDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a new {@link SimpleMongoClientDbFactory} instance for the given {@code connectionString}.
|
||||
@@ -72,45 +69,6 @@ public class SimpleMongoClientDbFactory extends MongoDbFactorySupport<MongoClien
|
||||
* @param mongoInstanceCreated
|
||||
*/
|
||||
private SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) {
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
|
||||
throw new UnsupportedOperationException(String.format(
|
||||
"%s does not support legacy DBObject API! Please consider using SimpleMongoDbFactory for that purpose.",
|
||||
MongoClient.class));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return getMongoClient().startSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient()
|
||||
*/
|
||||
@Override
|
||||
protected void closeClient() {
|
||||
getMongoClient().close();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
protected MongoDatabase doGetMongoDatabase(String dbName) {
|
||||
return getMongoClient().getDatabase(dbName);
|
||||
super(mongoClient, databaseName, mongoInstanceCreated);
|
||||
}
|
||||
}
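A minimal wiring sketch for the replacement type (not part of the change set): SimpleMongoClientDbFactory is deprecated in favour of SimpleMongoClientDatabaseFactory; the connection string and database name below are placeholders.

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

class DatabaseFactorySketch {

	MongoTemplate mongoTemplate() {

		// The connection string has to carry a database name; the factory owns (and later closes) the client it creates.
		SimpleMongoClientDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
				"mongodb://localhost:27017/database");

		return new MongoTemplate(factory);
	}
}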
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
|
||||
import lombok.Value;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.beans.factory.DisposableBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
@@ -41,6 +42,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
*/
|
||||
public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory {
|
||||
@@ -99,7 +101,7 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(databaseName);
|
||||
}
|
||||
|
||||
@@ -107,12 +109,16 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty.");
|
||||
|
||||
MongoDatabase db = mongo.getDatabase(dbName);
|
||||
return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
MongoDatabase db = mongo.getDatabase(dbName);
|
||||
|
||||
return writeConcern != null ? db.withWriteConcern(writeConcern) : db;
|
||||
});
|
||||
}
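Since getMongoDatabase() now returns Mono<MongoDatabase>, callers compose on the publisher instead of dereferencing the database directly. A hedged sketch, assuming an existing SimpleReactiveMongoDatabaseFactory and a placeholder "orders" collection:

import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;

class ReactiveDatabaseAccessSketch {

	Mono<Long> countOrders(SimpleReactiveMongoDatabaseFactory factory) {

		// The database is resolved lazily inside the Mono rather than returned directly.
		return factory.getMongoDatabase() //
				.flatMap(db -> Mono.from(db.getCollection("orders").estimatedDocumentCount()));
	}
}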
|
||||
|
||||
/**
|
||||
@@ -135,6 +141,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
|
||||
*/
|
||||
@Override
|
||||
public CodecRegistry getCodecRegistry() {
|
||||
return this.mongo.getDatabase(databaseName).getCodecRegistry();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
@@ -171,8 +186,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return decorateDatabase(delegate.getMongoDatabase());
|
||||
public Mono<MongoDatabase> getMongoDatabase() throws DataAccessException {
|
||||
return delegate.getMongoDatabase().map(this::decorateDatabase);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -180,8 +195,8 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return decorateDatabase(delegate.getMongoDatabase(dbName));
|
||||
public Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return delegate.getMongoDatabase(dbName).map(this::decorateDatabase);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -193,6 +208,15 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getCodecRegistry()
|
||||
*/
|
||||
@Override
|
||||
public CodecRegistry getCodecRegistry() {
|
||||
return delegate.getCodecRegistry();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
@@ -100,14 +101,14 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return value;
|
||||
}
|
||||
|
||||
protected List<Object> append(Object value) {
|
||||
protected List<Object> append(Object value, Expand expandList) {
|
||||
|
||||
if (this.value instanceof List) {
|
||||
|
||||
List<Object> clone = new ArrayList<Object>((List) this.value);
|
||||
|
||||
if (value instanceof List) {
|
||||
clone.addAll((List) value);
|
||||
if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) {
|
||||
clone.addAll((Collection<?>) value);
|
||||
} else {
|
||||
clone.add(value);
|
||||
}
|
||||
@@ -117,6 +118,17 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return Arrays.asList(this.value, value);
|
||||
}
|
||||
|
||||
	/**
	 * Expand a nested list of values to single entries or keep the list.
	 */
	protected enum Expand {
		EXPAND_VALUES, KEEP_SOURCE
	}

	protected List<Object> append(Object value) {
		return append(value, Expand.EXPAND_VALUES);
	}
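An illustrative, hedged sketch of why the Expand toggle exists (not part of the change set; the "tags" field is a placeholder): comparison operators such as $eq now append a Collection argument with Expand.KEEP_SOURCE so the list is compared as a whole instead of being flattened into the operand array.

import java.util.Arrays;

import org.bson.Document;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Eq;

class ExpandSketch {

	Document eqAgainstArray() {

		// Renders roughly to { "$eq" : [ "$tags", [ "a", "b" ] ] } rather than { "$eq" : [ "$tags", "a", "b" ] }.
		return Eq.valueOf("tags").equalToValue(Arrays.asList("a", "b")) //
				.toDocument(Aggregation.DEFAULT_CONTEXT);
	}
}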
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected java.util.Map<String, Object> append(String key, Object value) {
|
||||
|
||||
|
||||
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
 * Adds new fields to documents. {@code $addFields} outputs documents that contain all existing fields from the input
 * documents and newly added fields.
 *
 * <pre class="code">
 * AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
 * </pre>
 *
 * @author Christoph Strobl
 * @since 3.0
 * @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/addFields/">MongoDB Aggregation
 *      Framework: $addFields</a>
 */
public class AddFieldsOperation extends DocumentEnhancingOperation {
|
||||
|
||||
/**
|
||||
* Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private AddFieldsOperation(Map<Object, Object> source) {
|
||||
super(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new instance of {@link AddFieldsOperation}
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @param value can be {@literal null}.
|
||||
*/
|
||||
public AddFieldsOperation(Object field, @Nullable Object value) {
|
||||
this(Collections.singletonMap(field, value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}.
|
||||
*
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public static AddFieldsOperationBuilder builder() {
|
||||
return new AddFieldsOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate another field to add.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public static ValueAppender addField(String field) {
|
||||
return new AddFieldsOperationBuilder().addField(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the value for a specific field to the operation.
|
||||
*
|
||||
* @param field the target field to add.
|
||||
* @param value the value to assign.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
public AddFieldsOperation addField(Object field, Object value) {
|
||||
|
||||
LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
|
||||
target.put(field, value);
|
||||
|
||||
return new AddFieldsOperation(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Concatenate additional fields to add.
|
||||
*
|
||||
* @return new instance of {@link AddFieldsOperationBuilder}.
|
||||
*/
|
||||
public AddFieldsOperationBuilder and() {
|
||||
return new AddFieldsOperationBuilder(getValueMap());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String mongoOperator() {
|
||||
return "$addFields";
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class AddFieldsOperationBuilder {
|
||||
|
||||
private final Map<Object, Object> valueMap;
|
||||
|
||||
private AddFieldsOperationBuilder() {
|
||||
this.valueMap = new LinkedHashMap<>();
|
||||
}
|
||||
|
||||
private AddFieldsOperationBuilder(Map<Object, Object> source) {
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
}
|
||||
|
||||
public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) {
|
||||
return addField(field).withValue(value);
|
||||
}
|
||||
|
||||
public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) {
|
||||
return addField(field).withValueOf(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the field to add.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return new instance of {@link ValueAppender}.
|
||||
*/
|
||||
public ValueAppender addField(String field) {
|
||||
|
||||
return new ValueAppender() {
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValue(Object value) {
|
||||
|
||||
valueMap.put(field, value);
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValueOf(Object value) {
|
||||
|
||||
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values) {
|
||||
|
||||
valueMap.put(field, new ExpressionProjection(operation, values));
|
||||
return AddFieldsOperationBuilder.this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public AddFieldsOperation build() {
|
||||
return new AddFieldsOperation(valueMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface ValueAppender {
|
||||
|
||||
/**
|
||||
* Define the value to assign as is.
|
||||
*
|
||||
* @param value can be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValue(@Nullable Object value);
|
||||
|
||||
/**
|
||||
* Define the value to assign. Plain {@link String} values are treated as {@link Field field references}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValueOf(Object value);
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code $add}.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return new instance of {@link AddFieldsOperation}.
|
||||
*/
|
||||
AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
}
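A short, hedged sketch of the builder above (not part of the change set), showing the Document an assembled stage renders to; the field names and grades are placeholders taken from the class javadoc.

import org.bson.Document;

import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

class AddFieldsRenderingSketch {

	Document render() {

		AddFieldsOperation addFields = AddFieldsOperation.builder() //
				.addFieldWithValue("totalHomework", "A+") //
				.addFieldWithValue("totalQuiz", "B-") //
				.build();

		// Expected shape, roughly: { "$addFields" : { "totalHomework" : "A+", "totalQuiz" : "B-" } }
		return addFields.toDocument(Aggregation.DEFAULT_CONTEXT);
	}
}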
|
||||
@@ -23,9 +23,11 @@ import java.util.List;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
@@ -117,7 +119,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s.
|
||||
*
|
||||
*
|
||||
* @param operations can be {@literal empty} but must not be {@literal null}.
|
||||
* @return new instance of {@link AggregationUpdate}.
|
||||
* @since 3.0
|
||||
@@ -200,11 +202,16 @@ public class Aggregation {
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
Assert.notNull(options, "AggregationOptions must not be null!");
|
||||
|
||||
// check $out is the last operation if it exists
|
||||
// check $out/$merge is the last operation if it exists
|
||||
for (AggregationOperation aggregationOperation : aggregationOperations) {
|
||||
|
||||
if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation, aggregationOperations)) {
|
||||
throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline.");
|
||||
}
|
||||
|
||||
if (aggregationOperation instanceof MergeOperation && !isLast(aggregationOperation, aggregationOperations)) {
|
||||
throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline.");
|
||||
}
|
||||
}
|
||||
|
||||
this.operations = aggregationOperations;
|
||||
@@ -234,6 +241,20 @@ public class Aggregation {
|
||||
return "_id";
|
||||
}
|
||||
|
||||
	/**
	 * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
	 * <p/>
	 * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is
	 * an alias for {@code $addFields}.
	 *
	 * @return new instance of {@link AddFieldsOperationBuilder}.
	 * @see AddFieldsOperation
	 * @since 3.0
	 */
	public static AddFieldsOperationBuilder addFields() {
		return AddFieldsOperation.builder();
	}
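A hedged pipeline sketch using the new static entry point above together with an existing $match stage (not part of the change set); the "status" and "score" fields are placeholders.

import static org.springframework.data.mongodb.core.aggregation.Aggregation.addFields;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class AddFieldsPipelineSketch {

	Aggregation scoredOpenOrders() {

		// $match narrows the input first, then $addFields adds a constant field to every remaining document.
		return newAggregation( //
				match(Criteria.where("status").is("open")), //
				addFields().addFieldWithValue("score", 42).build());
	}
}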
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including the given fields.
|
||||
*
|
||||
@@ -493,6 +514,30 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines the output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}.
|
||||
*
|
||||
* @return new instance of {@link MergeOperationBuilder}.
|
||||
* @see MergeOperation
|
||||
* @since 3.0
|
||||
*/
|
||||
public static MergeOperationBuilder merge() {
|
||||
return MergeOperation.builder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in
|
||||
* the pipeline.
|
||||
@@ -612,6 +657,26 @@ public class Aggregation {
|
||||
return new CountOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored
|
||||
* within the document itself.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
|
||||
* .then(RedactOperation.PRUNE) //
|
||||
* .otherwise(RedactOperation.DESCEND));
|
||||
* </pre>
|
||||
*
|
||||
* @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
|
||||
* {@literal $$KEEP}. Must not be {@literal null}.
|
||||
* @return new instance of {@link RedactOperation}. Never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public static RedactOperation redact(AggregationExpression condition) {
|
||||
return new RedactOperation(condition);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link Fields} instance for the given field names.
|
||||
*
|
||||
@@ -634,19 +699,6 @@ public class Aggregation {
|
||||
return Fields.from(field(name, target));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines the output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param distanceField must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @since 1.7
|
||||
*/
|
||||
public static GeoNearOperation geoNear(NearQuery query, String distanceField) {
|
||||
return new GeoNearOperation(query, distanceField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions.Builder}.
|
||||
*
|
||||
|
||||
@@ -20,7 +20,6 @@ import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -103,4 +102,16 @@ public interface AggregationOperationContext {
|
||||
.map(PropertyDescriptor::getName) //
|
||||
.toArray(String[]::new));
|
||||
}
|
||||
|
||||
/**
|
||||
* This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for
|
||||
* its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, i.e. fields
* that are not present in one of the previous stages or the input source, anywhere in the pipeline.
|
||||
*
|
||||
* @return a more relaxed {@link AggregationOperationContext}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default AggregationOperationContext continueOnMissingFieldReference() {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,8 +23,6 @@ import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of supported
|
||||
* aggregation options can be found in the MongoDB reference documentation
|
||||
@@ -115,7 +113,7 @@ public class AggregationOptions {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationOptions} given {@link DBObject} containing aggregation options.
|
||||
* Creates new {@link AggregationOptions} given {@link Document} containing aggregation options.
|
||||
*
|
||||
* @param document must not be {@literal null}.
|
||||
* @return the {@link AggregationOptions}.
|
||||
|
||||
@@ -79,7 +79,7 @@ import org.springframework.util.Assert;
|
||||
public class AggregationUpdate extends Aggregation implements UpdateDefinition {
|
||||
|
||||
private boolean isolated = false;
|
||||
private Set<String> keysTouched = new HashSet<>();
|
||||
private final Set<String> keysTouched = new HashSet<>();
|
||||
|
||||
/**
|
||||
* Create new {@link AggregationUpdate}.
|
||||
|
||||
@@ -411,7 +411,7 @@ public class ComparisonOperators {
|
||||
public Cmp compareToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Cmp(append(value));
|
||||
return new Cmp(append(value, Expand.KEEP_SOURCE));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -488,7 +488,7 @@ public class ComparisonOperators {
|
||||
public Eq equalToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Eq(append(value));
|
||||
return new Eq(append(value, Expand.KEEP_SOURCE));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -873,7 +873,7 @@ public class ComparisonOperators {
|
||||
public Ne notEqualToValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Ne(append(value));
|
||||
return new Ne(append(value, Expand.KEEP_SOURCE));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,166 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Base class for common tasks required by {@link SetOperation} and {@link AddFieldsOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
abstract class DocumentEnhancingOperation implements InheritsFieldsAggregationOperation {
|
||||
|
||||
private final Map<Object, Object> valueMap;
|
||||
|
||||
private ExposedFields exposedFields = ExposedFields.empty();
|
||||
|
||||
protected DocumentEnhancingOperation(Map<Object, Object> source) {
|
||||
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
for (Object key : source.keySet()) {
|
||||
this.exposedFields = add(key);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext(
|
||||
exposedFields, context);
|
||||
|
||||
if (valueMap.size() == 1) {
|
||||
return context.getMappedObject(
|
||||
new Document(mongoOperator(), toSetEntry(valueMap.entrySet().iterator().next(), operationContext)));
|
||||
}
|
||||
|
||||
Document $set = new Document();
|
||||
valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll);
|
||||
return context.getMappedObject(new Document(mongoOperator(), $set));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the String representation of the native MongoDB operator.
|
||||
*/
|
||||
protected abstract String mongoOperator();
|
||||
|
||||
/**
|
||||
* @return the raw value map
|
||||
*/
|
||||
protected Map<Object, Object> getValueMap() {
|
||||
return this.valueMap;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return exposedFields;
|
||||
}
|
||||
|
||||
private ExposedFields add(Object field) {
|
||||
|
||||
if (field instanceof Field) {
|
||||
return exposedFields.and(new ExposedField((Field) field, true));
|
||||
}
|
||||
if (field instanceof String) {
|
||||
return exposedFields.and(new ExposedField(Fields.field((String) field), true));
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected %s to be a field/property.", field));
|
||||
}
|
||||
|
||||
private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {
|
||||
|
||||
String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
|
||||
: context.getReference((Field) entry.getKey()).getRaw();
|
||||
|
||||
Object value = computeValue(entry.getValue(), context);
|
||||
|
||||
return new Document(field, value);
|
||||
}
|
||||
|
||||
private static Object computeValue(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
return context.getReference((Field) value).toString();
|
||||
}
|
||||
|
||||
if (value instanceof ExpressionProjection) {
|
||||
return ((ExpressionProjection) value).toExpression(context);
|
||||
}
|
||||
|
||||
if (value instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) value).toDocument(context);
|
||||
}
|
||||
|
||||
if (value instanceof Collection) {
|
||||
return ((Collection<?>) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link AggregationExpression} based on a SpEL expression.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
static class ExpressionProjection {
|
||||
|
||||
private static final SpelExpressionTransformer TRANSFORMER = new SpelExpressionTransformer();
|
||||
|
||||
private final String expression;
|
||||
private final Object[] params;
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation.ExpressionProjectionOperationBuilder.ExpressionProjection} for the given
|
||||
* field, SpEL expression and parameters.
|
||||
*
|
||||
* @param expression must not be {@literal null} or empty.
|
||||
* @param parameters must not be {@literal null}.
|
||||
*/
|
||||
ExpressionProjection(String expression, Object[] parameters) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
Assert.notNull(parameters, "Parameters must not be null!");
|
||||
|
||||
this.expression = expression;
|
||||
this.params = parameters.clone();
|
||||
}
|
||||
|
||||
Object toExpression(AggregationOperationContext context) {
|
||||
return TRANSFORMER.transform(expression, context, params);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,594 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.VariableOperators.Let;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $merge}-operation.
|
||||
* <p>
|
||||
* We recommend using the {@link MergeOperationBuilder builder} via {@link MergeOperation#builder()} instead of
* creating instances of this class directly.
|
||||
*
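* A minimal builder-based sketch (collection and field names are illustrative placeholders):
* <pre class="code">
* MergeOperation merge = MergeOperation.builder() //
* 		.intoCollection("monthlyTotals") //
* 		.on("_id") //
* 		.whenMatched(WhenDocumentsMatch.replaceDocument()) //
* 		.whenNotMatched(WhenDocumentsDontMatch.insertNewDocument()) //
* 		.build();
* </pre>
*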
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/merge/">MongoDB Documentation</a>
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MergeOperation implements FieldsExposingAggregationOperation, InheritsFieldsAggregationOperation {
|
||||
|
||||
private final MergeOperationTarget into;
|
||||
private final UniqueMergeId on;
|
||||
private final @Nullable Let let;
|
||||
private final @Nullable WhenDocumentsMatch whenMatched;
|
||||
private final @Nullable WhenDocumentsDontMatch whenNotMatched;
|
||||
|
||||
/**
|
||||
* Create new instance of {@link MergeOperation}.
|
||||
*
|
||||
* @param into the target (collection and database)
|
||||
* @param on the unique identifier. Can be {@literal null}.
|
||||
* @param let exposed variables for {@link WhenDocumentsMatch#updateWith(Aggregation)}. Can be {@literal null}.
|
||||
* @param whenMatched behavior if a result document matches an existing one in the target collection. Can be
|
||||
* {@literal null}.
|
||||
* @param whenNotMatched behavior if a result document does not match an existing one in the target collection. Can be
|
||||
* {@literal null}.
|
||||
*/
|
||||
public MergeOperation(MergeOperationTarget into, UniqueMergeId on, @Nullable Let let,
|
||||
@Nullable WhenDocumentsMatch whenMatched, @Nullable WhenDocumentsDontMatch whenNotMatched) {
|
||||
|
||||
Assert.notNull(into, "Into must not be null! Please provide a target collection.");
|
||||
Assert.notNull(on, "On must not be null! Use UniqueMergeId.id() instead.");
|
||||
|
||||
this.into = into;
|
||||
this.on = on;
|
||||
this.let = let;
|
||||
this.whenMatched = whenMatched;
|
||||
this.whenNotMatched = whenNotMatched;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simplified form to apply all default options for {@code $merge} (including writing to a collection in the same
|
||||
* database).
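* <p>
* With all defaults the stage renders to just the collection name, for example (the name is a placeholder):
* <pre class="code">
* { $merge: "yearlyReport" }
* </pre>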
|
||||
*
|
||||
* @param collection the output collection within the same database.
|
||||
* @return new instance of {@link MergeOperation}.
|
||||
*/
|
||||
public static MergeOperation mergeInto(String collection) {
|
||||
return builder().intoCollection(collection).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Access the {@link MergeOperationBuilder builder API} to create a new instance of {@link MergeOperation}.
|
||||
*
|
||||
* @return new instance of {@link MergeOperationBuilder}.
|
||||
*/
|
||||
public static MergeOperationBuilder builder() {
|
||||
return new MergeOperationBuilder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.Aggregation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (isJustCollection()) {
|
||||
return new Document("$merge", into.collection);
|
||||
}
|
||||
|
||||
Document $merge = new Document();
|
||||
$merge.putAll(into.toDocument(context));
|
||||
|
||||
if (!on.isJustIdField()) {
|
||||
$merge.putAll(on.toDocument(context));
|
||||
}
|
||||
|
||||
if (let != null) {
|
||||
$merge.append("let", let.toDocument(context).get("$let", Document.class).get("vars"));
|
||||
}
|
||||
|
||||
if (whenMatched != null) {
|
||||
$merge.putAll(whenMatched.toDocument(context));
|
||||
}
|
||||
|
||||
if (whenNotMatched != null) {
|
||||
$merge.putAll(whenNotMatched.toDocument(context));
|
||||
}
|
||||
|
||||
return new Document("$merge", $merge);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
if (let == null) {
|
||||
return ExposedFields.from();
|
||||
}
|
||||
|
||||
return ExposedFields.synthetic(Fields.fields(let.getVariableNames()));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation#inheritsFields()
|
||||
*/
|
||||
@Override
|
||||
public boolean inheritsFields() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if nothing more than the collection is specified.
|
||||
*/
|
||||
private boolean isJustCollection() {
|
||||
return into.isTargetingSameDatabase() && on.isJustIdField() && let == null && whenMatched == null
|
||||
&& whenNotMatched == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object representing the unique id used during the merge operation to identify duplicates in the target
|
||||
* collection.
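* <p>
* For illustration (field names are placeholders):
* <pre class="code">
* UniqueMergeId.ofIdFields("state", "year")  // renders as: on: [ "state", "year" ]
* UniqueMergeId.id()                         // omits the "on" clause, merging on the _id field
* </pre>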
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class UniqueMergeId {
|
||||
|
||||
private static final UniqueMergeId ID = new UniqueMergeId(Collections.emptyList());
|
||||
|
||||
private final Collection<String> uniqueIdentifier;
|
||||
|
||||
private UniqueMergeId(Collection<String> uniqueIdentifier) {
|
||||
this.uniqueIdentifier = uniqueIdentifier;
|
||||
}
|
||||
|
||||
public static UniqueMergeId ofIdFields(String... fields) {
|
||||
|
||||
Assert.noNullElements(fields, "Fields must not contain null values!");
|
||||
|
||||
if (ObjectUtils.isEmpty(fields)) {
|
||||
return id();
|
||||
}
|
||||
|
||||
return new UniqueMergeId(Arrays.asList(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge documents using the MongoDB {@literal _id} field.
*
* @return the {@link UniqueMergeId} targeting the {@literal _id} field.
|
||||
*/
|
||||
public static UniqueMergeId id() {
|
||||
return ID;
|
||||
}
|
||||
|
||||
boolean isJustIdField() {
|
||||
return this.equals(ID);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
List<String> mappedOn = uniqueIdentifier.stream().map(context::getReference).map(FieldReference::getRaw)
|
||||
.collect(Collectors.toList());
|
||||
return new Document("on", mappedOn.size() == 1 ? mappedOn.iterator().next() : mappedOn);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object representing the {@code into} field of a {@code $merge} aggregation stage. <br />
|
||||
* If not stated explicitly via {@link MergeOperationTarget#inDatabase(String)} the {@literal collection} is created
|
||||
* in the very same {@literal database}. In this case {@code into} is just a single String holding the collection
|
||||
* name. <br />
|
||||
*
|
||||
* <pre class="code">
|
||||
* into: "target-collection-name"
|
||||
* </pre>
|
||||
*
|
||||
* If the collection resides in a different database, {@code into} becomes a {@link Document} naming both the
* database and the collection:
*
* <pre class="code">
* {
*   into: { db: "target-db", coll: "target-collection-name" }
* }
* </pre>
*
* @author Christoph Strobl
* @since 3.0
|
||||
*/
|
||||
public static class MergeOperationTarget {
|
||||
|
||||
private final @Nullable String database;
|
||||
private final String collection;
|
||||
|
||||
private MergeOperationTarget(@Nullable String database, String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
this.database = database;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param collection The output collection results will be stored in. Must not be {@literal null}.
|
||||
* @return new instance of {@link MergeOperationTarget}.
|
||||
*/
|
||||
public static MergeOperationTarget collection(String collection) {
|
||||
return new MergeOperationTarget(null, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the target database if different from the source one.
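* <p>
* A possible usage (names are placeholders): {@code MergeOperationTarget.collection("holidayReport").inDatabase("reporting")}.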
|
||||
*
|
||||
* @param database must not be {@literal null}.
|
||||
* @return new instance of {@link MergeOperationTarget}.
|
||||
*/
|
||||
public MergeOperationTarget inDatabase(String database) {
|
||||
return new MergeOperationTarget(database, collection);
|
||||
}
|
||||
|
||||
boolean isTargetingSameDatabase() {
|
||||
return !StringUtils.hasText(database);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
return new Document("into",
|
||||
!StringUtils.hasText(database) ? collection : new Document("db", database).append("coll", collection));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object specifying how to deal with a result document that matches an existing document in the collection
|
||||
* based on the fields of the {@code on} property describing the unique identifier.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class WhenDocumentsMatch {
|
||||
|
||||
private final Object value;
|
||||
|
||||
private WhenDocumentsMatch(Object value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static WhenDocumentsMatch whenMatchedOf(String value) {
|
||||
return new WhenDocumentsMatch(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the existing document in the output collection with the matching results document.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch replaceDocument() {
|
||||
return whenMatchedOf("replace");
|
||||
}
|
||||
|
||||
/**
|
||||
* Keep the existing document in the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch keepExistingDocument() {
|
||||
return whenMatchedOf("keepExisting");
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge the matching documents. Please see the MongoDB reference documentation for details.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch mergeDocuments() {
|
||||
return whenMatchedOf("merge");
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop and fail the aggregation operation. Changes already applied to previous documents are not reverted.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch failOnMatch() {
|
||||
return whenMatchedOf("fail");
|
||||
}
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to update the document in the collection. Please see the MongoDB reference
|
||||
* documentation for details.
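* <p>
* A minimal sketch (field name and value are placeholders):
* <pre class="code">
* WhenDocumentsMatch.updateWith(Aggregation.newAggregation( //
* 		SetOperation.set("status").toValue("merged")));
* </pre>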
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch updateWith(Aggregation aggregation) {
|
||||
return new WhenDocumentsMatch(aggregation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use an aggregation pipeline to update the document in the collection. Please see the MongoDB reference
|
||||
* documentation for details.
|
||||
*
|
||||
* @param aggregationPipeline must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsMatch}.
|
||||
*/
|
||||
public static WhenDocumentsMatch updateWith(List<AggregationOperation> aggregationPipeline) {
|
||||
return new WhenDocumentsMatch(aggregationPipeline);
|
||||
}
|
||||
|
||||
Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Aggregation) {
|
||||
return new Document("whenMatched", ((Aggregation) value).toPipeline(context));
|
||||
}
|
||||
|
||||
return new Document("whenMatched", value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value Object specifying how to deal with a result document that does not match an existing document in the
* collection based on the fields of the {@code on} property describing the unique identifier.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class WhenDocumentsDontMatch {
|
||||
|
||||
private final String value;
|
||||
|
||||
private WhenDocumentsDontMatch(String value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method creating {@link WhenDocumentsDontMatch} from a {@code value} literal.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch whenNotMatchedOf(String value) {
|
||||
return new WhenDocumentsDontMatch(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert the document into the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch insertNewDocument() {
|
||||
return whenNotMatchedOf("insert");
|
||||
}
|
||||
|
||||
/**
|
||||
* Discard the document - do not insert the document into the output collection.
|
||||
*
|
||||
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch discardDocument() {
|
||||
return whenNotMatchedOf("discard");
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop and fail the aggregation operation. Changes already applied to previous documents are not reverted.
*
* @return new instance of {@link WhenDocumentsDontMatch}.
|
||||
*/
|
||||
public static WhenDocumentsDontMatch failWhenNotMatch() {
|
||||
return whenNotMatchedOf("fail");
|
||||
}
|
||||
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("whenNotMatched", value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API to construct a {@link MergeOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public static class MergeOperationBuilder {
|
||||
|
||||
private String collection;
|
||||
private @Nullable String database;
|
||||
private UniqueMergeId id = UniqueMergeId.id();
|
||||
private @Nullable Let let;
|
||||
private @Nullable WhenDocumentsMatch whenMatched;
|
||||
private @Nullable WhenDocumentsDontMatch whenNotMatched;
|
||||
|
||||
public MergeOperationBuilder() {}
|
||||
|
||||
/**
|
||||
* Required output collection name to store results to.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor empty.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder intoCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be null nor empty!");
|
||||
|
||||
this.collection = collection;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally define a target database if different from the current one.
|
||||
*
|
||||
* @param database must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder inDatabase(String database) {
|
||||
|
||||
this.database = database;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the target to store results in.
|
||||
*
|
||||
* @param into must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder into(MergeOperationTarget into) {
|
||||
|
||||
this.database = into.database;
|
||||
this.collection = into.collection;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the target to store results in.
|
||||
*
|
||||
* @param target must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder target(MergeOperationTarget target) {
|
||||
return into(target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a single field or multiple fields that act as a unique identifier for a document. The identifier
|
||||
* determines if a result document matches an already existing document in the output collection. <br />
* The aggregation result documents must contain the field(s) specified via {@code on}, unless it is the {@code _id}
|
||||
* field.
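* <p>
* For illustration (collection and field names are placeholders):
* <pre class="code">
* MergeOperation.builder().intoCollection("sales").on("date", "customerId") // ...
* </pre>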
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder on(String... fields) {
|
||||
return id(UniqueMergeId.ofIdFields(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the identifier that determines if a result document matches an already existing document in the output
|
||||
* collection.
|
||||
*
|
||||
* @param id must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder id(UniqueMergeId id) {
|
||||
|
||||
this.id = id;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
|
||||
* aggregation}.
|
||||
*
|
||||
* @param let the variable expressions
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder let(Let let) {
|
||||
|
||||
this.let = let;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose the variables defined by {@link Let} to the {@link WhenDocumentsMatch#updateWith(Aggregation) update
|
||||
* aggregation}.
|
||||
*
|
||||
* @param let the variable expressions
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder exposeVariablesOf(Let let) {
|
||||
return let(let);
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param whenMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenMatched(WhenDocumentsMatch whenMatched) {
|
||||
|
||||
this.whenMatched = whenMatched;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param whenMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsMatch(WhenDocumentsMatch whenMatched) {
|
||||
return whenMatched(whenMatched);
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link Aggregation action} to take place when documents already exist in the target collection.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsMatchApply(Aggregation aggregation) {
|
||||
return whenMatched(WhenDocumentsMatch.updateWith(aggregation));
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents do not already exist in the target collection.
|
||||
*
|
||||
* @param whenNotMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenNotMatched(WhenDocumentsDontMatch whenNotMatched) {
|
||||
|
||||
this.whenNotMatched = whenNotMatched;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The action to take place when documents do not already exist in the target collection.
|
||||
*
|
||||
* @param whenNotMatched must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MergeOperationBuilder whenDocumentsDontMatch(WhenDocumentsDontMatch whenNotMatched) {
|
||||
return whenNotMatched(whenNotMatched);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return new instance of {@link MergeOperation}.
|
||||
*/
|
||||
public MergeOperation build() {
|
||||
return new MergeOperation(new MergeOperationTarget(database, collection), id, let, whenMatched, whenNotMatched);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,243 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link RedactOperation} allows restricting the content of a {@link Document} based on information stored within
|
||||
* itself.
|
||||
*
|
||||
* <pre class="code">
|
||||
* RedactOperation.builder() //
|
||||
* .when(Criteria.where("level").is(5)) //
|
||||
* .thenPrune() //
|
||||
* .otherwiseDescend() //
|
||||
* .build();
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/redact/">https://docs.mongodb.com/manual/reference/operator/aggregation/redact/</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
public class RedactOperation implements AggregationOperation {
|
||||
|
||||
/**
|
||||
* Return fields at the current document level. Exclude embedded ones.
|
||||
*/
|
||||
public static final String DESCEND = "$$DESCEND";
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level.
|
||||
*/
|
||||
public static final String KEEP = "$$KEEP";
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level.
|
||||
*/
|
||||
public static final String PRUNE = "$$PRUNE";
|
||||
|
||||
private final AggregationExpression condition;
|
||||
|
||||
/**
|
||||
* Create new {@link RedactOperation}.
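* <p>
* A minimal sketch using {@link ConditionalOperators.Cond} directly (the {@code level} field and value are
* placeholders):
* <pre class="code">
* new RedactOperation(ConditionalOperators.Cond.when(Criteria.where("level").is(5)) //
* 		.then(RedactOperation.PRUNE) //
* 		.otherwise(RedactOperation.DESCEND));
* </pre>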
|
||||
*
|
||||
* @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or
|
||||
* {@literal $$KEEP}. Must not be {@literal null}.
|
||||
*/
|
||||
public RedactOperation(AggregationExpression condition) {
|
||||
|
||||
Assert.notNull(condition, "Condition must not be null!");
|
||||
this.condition = condition;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$redact", condition.toDocument(context));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a new instance of {@link RedactOperationBuilder} to specify condition and outcome of the {@literal $redact}
|
||||
* operation.
|
||||
*
|
||||
* @return new instance of {@link RedactOperationBuilder}.
|
||||
*/
|
||||
public static RedactOperationBuilder builder() {
|
||||
return new RedactOperationBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder to create new instance of {@link RedactOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RedactOperationBuilder {
|
||||
|
||||
private Object when;
|
||||
private Object then;
|
||||
private Object otherwise;
|
||||
|
||||
private RedactOperationBuilder() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(CriteriaDefinition criteria) {
|
||||
|
||||
this.when = criteria;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(AggregationExpression condition) {
|
||||
|
||||
this.when = condition;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the evaluation condition.
|
||||
*
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder when(Document condition) {
|
||||
|
||||
this.when = condition;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return fields at the current document level and exclude embedded ones if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenDescend() {
|
||||
return then(DESCEND);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenKeep() {
|
||||
return then(KEEP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level if the condition is met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder thenPrune() {
|
||||
return then(PRUNE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
|
||||
* when the condition is met.
|
||||
*
|
||||
* @param then must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder then(Object then) {
|
||||
|
||||
this.then = then;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return fields at the current document level and exclude embedded ones if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwiseDescend() {
|
||||
return otherwise(DESCEND);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return/Keep all fields at the current document/embedded level if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwiseKeep() {
|
||||
return otherwise(KEEP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude all fields at this current document/embedded level if the condition is not met.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwisePrune() {
|
||||
return otherwise(PRUNE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the outcome (anything that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or {@literal $$KEEP})
|
||||
* when the condition is not met.
|
||||
*
|
||||
* @param otherwise must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public RedactOperationBuilder otherwise(Object otherwise) {
|
||||
this.otherwise = otherwise;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return new instance of {@link RedactOperation}.
|
||||
*/
|
||||
public RedactOperation build() {
|
||||
return new RedactOperation(when().then(then).otherwise(otherwise));
|
||||
}
|
||||
|
||||
private ThenBuilder when() {
|
||||
|
||||
if (when instanceof CriteriaDefinition) {
|
||||
return ConditionalOperators.Cond.when((CriteriaDefinition) when);
|
||||
}
|
||||
if (when instanceof AggregationExpression) {
|
||||
return ConditionalOperators.Cond.when((AggregationExpression) when);
|
||||
}
|
||||
if (when instanceof Document) {
|
||||
return ConditionalOperators.Cond.when((Document) when);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format(
|
||||
"Invalid Condition. Expected CriteriaDefinition, AggregationExpression or Document but was %s.", when));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -15,23 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.SetOperation.FieldAppender.ValueAppender;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input
|
||||
* documents and newly added fields.
|
||||
*
|
||||
*
|
||||
* <pre class="code">
|
||||
* SetOperation.set("totalHomework").toValue("A+").and().set("totalQuiz").toValue("B-")
|
||||
* </pre>
|
||||
@@ -41,10 +35,7 @@ import org.springframework.lang.Nullable;
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/set/">MongoDB Aggregation Framework:
|
||||
* $set</a>
|
||||
*/
|
||||
public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
|
||||
private Map<Object, Object> valueMap;
|
||||
private ExposedFields exposedFields = ExposedFields.empty();
|
||||
public class SetOperation extends DocumentEnhancingOperation {
|
||||
|
||||
/**
|
||||
* Create new instance of {@link SetOperation} adding map keys as exposed fields.
|
||||
@@ -52,11 +43,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
private SetOperation(Map<Object, Object> source) {
|
||||
|
||||
this.valueMap = new LinkedHashMap<>(source);
|
||||
for (Object key : source.keySet()) {
|
||||
this.exposedFields = add(key);
|
||||
}
|
||||
super(source);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -97,7 +84,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
*/
|
||||
public SetOperation set(Object field, Object value) {
|
||||
|
||||
LinkedHashMap<Object, Object> target = new LinkedHashMap<>(this.valueMap);
|
||||
LinkedHashMap<Object, Object> target = new LinkedHashMap<>(getValueMap());
|
||||
target.put(field, value);
|
||||
|
||||
return new SetOperation(target);
|
||||
@@ -109,73 +96,12 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
* @return new instance of {@link FieldAppender}.
|
||||
*/
|
||||
public FieldAppender and() {
|
||||
return new FieldAppender(this.valueMap);
|
||||
return new FieldAppender(getValueMap());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext(
|
||||
exposedFields, context);
|
||||
|
||||
if (valueMap.size() == 1) {
|
||||
return context
|
||||
.getMappedObject(new Document("$set", toSetEntry(valueMap.entrySet().iterator().next(), operationContext)));
|
||||
}
|
||||
|
||||
Document $set = new Document();
|
||||
valueMap.entrySet().stream().map(it -> toSetEntry(it, operationContext)).forEach($set::putAll);
|
||||
return context.getMappedObject(new Document("$set", $set));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return exposedFields;
|
||||
}
|
||||
|
||||
private ExposedFields add(Object field) {
|
||||
|
||||
if (field instanceof Field) {
|
||||
return exposedFields.and(new ExposedField((Field) field, true));
|
||||
}
|
||||
if (field instanceof String) {
|
||||
return exposedFields.and(new ExposedField(Fields.field((String) field), true));
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected %s to be a field/property.", field));
|
||||
}
|
||||
|
||||
private static Document toSetEntry(Entry<Object, Object> entry, AggregationOperationContext context) {
|
||||
|
||||
String field = entry.getKey() instanceof String ? context.getReference((String) entry.getKey()).getRaw()
|
||||
: context.getReference((Field) entry.getKey()).getRaw();
|
||||
|
||||
Object value = computeValue(entry.getValue(), context);
|
||||
|
||||
return new Document(field, value);
|
||||
}
|
||||
|
||||
private static Object computeValue(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
return context.getReference((Field) value).toString();
|
||||
}
|
||||
if (value instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) value).toDocument(context);
|
||||
}
|
||||
if (value instanceof Collection) {
|
||||
return ((Collection) value).stream().map(it -> computeValue(it, context)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
return value;
|
||||
protected String mongoOperator() {
|
||||
return "$set";
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -217,6 +143,13 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
valueMap.put(field, value instanceof String ? Fields.fields((String) value) : value);
|
||||
return FieldAppender.this.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SetOperation withValueOfExpression(String operation, Object... values) {
|
||||
|
||||
valueMap.put(field, new ExpressionProjection(operation, values));
|
||||
return FieldAppender.this.build();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -226,6 +159,7 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface ValueAppender {
|
||||
@@ -245,6 +179,15 @@ public class SetOperation implements InheritsFieldsAggregationOperation {
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
SetOperation toValueOf(Object value);
|
||||
|
||||
/**
|
||||
* Adds a generic projection for the current field.
|
||||
*
|
||||
* @param operation the operation key, e.g. {@code $add}.
|
||||
* @param values the values to be set for the projection operation.
|
||||
* @return new instance of {@link SetOperation}.
|
||||
*/
|
||||
SetOperation withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -127,6 +127,15 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return Fields.fields(fields.toArray(new String[0]));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#continueOnMissingFieldReference()
|
||||
*/
|
||||
@Override
|
||||
public AggregationOperationContext continueOnMissingFieldReference() {
|
||||
return new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, mapper);
|
||||
}
|
||||
|
||||
protected FieldReference getReferenceFor(Field field) {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
|
||||
|
||||
@@ -296,7 +296,7 @@ public class VariableOperators {
|
||||
return toLet(ExposedFields.synthetic(Fields.fields(getVariableNames())), context);
|
||||
}
|
||||
|
||||
private String[] getVariableNames() {
|
||||
String[] getVariableNames() {
|
||||
|
||||
String[] varNames = new String[this.vars.size()];
|
||||
for (int i = 0; i < this.vars.size(); i++) {
|
||||
|
||||
@@ -26,7 +26,7 @@ import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.ConverterBuilder;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mapping.model.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToBigIntegerConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.ObjectIdToStringConverter;
|
||||
@@ -107,9 +107,8 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, Date.class)) {
|
||||
|
||||
conversionService.addConverter(
|
||||
ConverterBuilder.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp()))
|
||||
.getReadingConverter());
|
||||
conversionService.addConverter(ConverterBuilder
|
||||
.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp())).getReadingConverter());
|
||||
}
|
||||
|
||||
conversionService
|
||||
|
||||
@@ -44,7 +44,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
@@ -70,16 +70,16 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class);
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDbFactory}.
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDbFactory mongoDbFactory) {
|
||||
public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
@@ -116,7 +116,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName() : mongoDbFactory.getDb().getName(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName() : mongoDbFactory.getMongoDatabase().getName(),
|
||||
dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -156,7 +156,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Bulk fetching DBRefs {} from {}.{}.", ids,
|
||||
StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName()
|
||||
: mongoDbFactory.getDb().getName(),
|
||||
: mongoDbFactory.getMongoDatabase().getName(),
|
||||
databaseSource.getCollectionName());
|
||||
}
|
||||
|
||||
@@ -497,7 +497,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
protected MongoCollection<Document> getCollection(DBRef dbref) {
|
||||
|
||||
return (StringUtils.hasText(dbref.getDatabaseName()) ? mongoDbFactory.getDb(dbref.getDatabaseName())
|
||||
: mongoDbFactory.getDb()).getCollection(dbref.getCollectionName(), Document.class);
|
||||
return (StringUtils.hasText(dbref.getDatabaseName()) ? mongoDbFactory.getMongoDatabase(dbref.getDatabaseName())
|
||||
: mongoDbFactory.getMongoDatabase()).getCollection(dbref.getCollectionName(), Document.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,16 +43,17 @@ import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mapping.model.DefaultSpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.EntityInstantiator;
|
||||
import org.springframework.data.mapping.model.ParameterValueProvider;
|
||||
import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider;
|
||||
import org.springframework.data.mapping.model.PropertyValueProvider;
|
||||
@@ -60,9 +61,10 @@ import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
@@ -90,8 +92,10 @@ import com.mongodb.DBRef;
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
* @author Mark Paluch
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Heesu Jung
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware, ValueResolver {
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. Parent object was: %4$s";
|
||||
private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!";
|
||||
@@ -109,6 +113,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private SpELContext spELContext;
|
||||
private @Nullable EntityCallbacks entityCallbacks;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
@@ -131,18 +136,19 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE);
|
||||
this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, MappingMongoConverter.this);
|
||||
this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext,
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDbFactory} and {@link MappingContext}.
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link MongoDatabaseFactory} and {@link MappingContext}.
|
||||
*
|
||||
* @deprecated use the constructor taking a {@link DbRefResolver} instead.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
@Deprecated
|
||||
public MappingMongoConverter(MongoDbFactory mongoDbFactory,
|
||||
public MappingMongoConverter(MongoDatabaseFactory mongoDbFactory,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
|
||||
setCodecRegistryProvider(mongoDbFactory);
|
||||
@@ -210,6 +216,26 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.spELContext = new SpELContext(this.spELContext, applicationContext);
|
||||
|
||||
if (entityCallbacks == null) {
|
||||
setEntityCallbacks(EntityCallbacks.create(applicationContext));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}.
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
* @throws IllegalArgumentException if the given instance is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
public void setEntityCallbacks(EntityCallbacks entityCallbacks) {
|
||||
|
||||
Assert.notNull(entityCallbacks, "EntityCallbacks must not be null!");
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -226,11 +252,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
private <S extends Object> S read(TypeInformation<S> type, @Nullable Bson bson, ObjectPath path) {
|
||||
private <S extends Object> S read(TypeInformation<S> type, Bson bson, ObjectPath path) {
|
||||
|
||||
if (null == bson) {
|
||||
return null;
|
||||
}
|
||||
Assert.notNull(bson, "Bson must not be null!");
|
||||
|
||||
TypeInformation<? extends S> typeToUse = typeMapper.readType(bson, type);
|
||||
Class<? extends S> rawType = typeToUse.getType();
|
||||
@@ -431,7 +455,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
SpELExpressionEvaluator evaluator) {
|
||||
|
||||
return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), currentPath, evaluator,
|
||||
MappingMongoConverter.this);
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
}
|
||||
|
||||
private void readAssociation(Association<MongoPersistentProperty> association, PersistentPropertyAccessor<?> accessor,
|
||||
@@ -1043,12 +1067,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("No id property found on class " + entity.getType());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.ValueResolver#getValueInternal(org.springframework.data.mongodb.core.mapping.MongoPersistentProperty, com.mongodb.Document, org.springframework.data.mapping.model.SpELExpressionEvaluator, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
|
||||
@Nullable
|
||||
private Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
|
||||
ObjectPath path) {
|
||||
return new MongoDbPropertyValueProvider(bson, evaluator, path).getPropertyValue(prop);
|
||||
}
|
||||
@@ -1261,9 +1281,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
if (conversions.isSimpleType(obj.getClass())) {
|
||||
// Doesn't need conversion
|
||||
return getPotentiallyConvertedSimpleWrite(obj,
|
||||
typeInformation != null ? typeInformation.getType() : Object.class);
|
||||
|
||||
Class<?> conversionTargetType;
|
||||
|
||||
if (typeInformation != null && conversions.isSimpleType(typeInformation.getType())) {
|
||||
conversionTargetType = typeInformation.getType();
|
||||
} else {
|
||||
conversionTargetType = Object.class;
|
||||
}
|
||||
|
||||
return getPotentiallyConvertedSimpleWrite(obj, conversionTargetType);
|
||||
}
|
||||
|
||||
if (obj instanceof List) {
|
||||
@@ -1491,7 +1518,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), path, evaluator,
|
||||
MappingMongoConverter.this);
|
||||
MappingMongoConverter.this::getValueInternal);
|
||||
|
||||
DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null;
|
||||
return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler);
|
||||
@@ -1573,7 +1600,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@Nullable
|
||||
private <T> T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation<?> type, ObjectPath path,
|
||||
final Class<?> rawType) {
|
||||
@Nullable Class<?> rawType) {
|
||||
|
||||
List<T> result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType);
|
||||
return CollectionUtils.isEmpty(result) ? null : result.iterator().next();
|
||||
@@ -1596,7 +1623,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> List<T> bulkReadAndConvertDBRefs(List<DBRef> dbrefs, TypeInformation<?> type, ObjectPath path,
|
||||
final Class<?> rawType) {
|
||||
@Nullable Class<?> rawType) {
|
||||
|
||||
if (CollectionUtils.isEmpty(dbrefs)) {
|
||||
return Collections.emptyList();
|
||||
@@ -1607,23 +1634,27 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
: bulkReadRefs(dbrefs);
|
||||
String collectionName = dbrefs.iterator().next().getCollectionName();
|
||||
|
||||
List<T> targeList = new ArrayList<>(dbrefs.size());
|
||||
List<T> targetList = new ArrayList<>(dbrefs.size());
|
||||
|
||||
for (Document document : referencedRawDocuments) {
|
||||
|
||||
T target = null;
|
||||
if (document != null) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, (Class<T>) rawType, collectionName));
|
||||
}
|
||||
|
||||
final T target = (T) read(type, document, path);
|
||||
targeList.add(target);
|
||||
maybeEmitEvent(
|
||||
new AfterLoadEvent<>(document, (Class<T>) (rawType != null ? rawType : Object.class), collectionName));
|
||||
target = (T) read(type, document, path);
|
||||
}
|
||||
|
||||
if (target != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, target, collectionName));
|
||||
target = maybeCallAfterConvert(target, document, collectionName);
|
||||
}
|
||||
|
||||
targetList.add(target);
|
||||
}
|
||||
|
||||
return targeList;
|
||||
return targetList;
|
||||
}
|
||||
|
||||
private void maybeEmitEvent(MongoMappingEvent<?> event) {
|
||||
@@ -1637,6 +1668,15 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return this.applicationContext != null;
|
||||
}
|
||||
|
||||
protected <T> T maybeCallAfterConvert(T object, Document document, String collection) {
|
||||
|
||||
if (null != entityCallbacks) {
|
||||
return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection);
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs the fetch operation for the given {@link DBRef}.
|
||||
*
|
||||
@@ -1671,12 +1711,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDbFactory} when loading {@link DBRef}.
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDatabaseFactory} when loading {@link DBRef}.
|
||||
*
|
||||
* @return new instance of {@link MappingMongoConverter}. Never {@literal null}.
|
||||
* @since 2.1.6
|
||||
*/
|
||||
public MappingMongoConverter with(MongoDbFactory dbFactory) {
|
||||
public MappingMongoConverter with(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
MappingMongoConverter target = new MappingMongoConverter(new DefaultDbRefResolver(dbFactory), mappingContext);
|
||||
target.applicationContext = applicationContext;
|
||||
|
||||
@@ -15,26 +15,40 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.LocalTime;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.core.convert.converter.GenericConverter;
|
||||
import org.springframework.data.convert.JodaTimeConverters;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Value object to capture custom conversion. {@link MongoCustomConversions} also act as factory for
|
||||
* {@link org.springframework.data.mapping.model.SimpleTypeHolder}
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @see org.springframework.data.convert.CustomConversions
|
||||
* @see org.springframework.data.mapping.model.SimpleTypeHolder
|
||||
@@ -71,7 +85,33 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
* @param converters must not be {@literal null}.
|
||||
*/
|
||||
public MongoCustomConversions(List<?> converters) {
|
||||
super(STORE_CONVERSIONS, converters);
|
||||
this(MongoConverterConfigurationAdapter.from(converters));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoCustomConversions} given {@link MongoConverterConfigurationAdapter}.
|
||||
*
|
||||
* @param conversionConfiguration must not be {@literal null}.
|
||||
* @since 2.3
|
||||
*/
|
||||
protected MongoCustomConversions(MongoConverterConfigurationAdapter conversionConfiguration) {
|
||||
super(conversionConfiguration.createConverterConfiguration());
|
||||
}
|
||||
|
||||
/**
|
||||
* Functional style {@link org.springframework.data.convert.CustomConversions} creation giving users a convenient way
|
||||
* of configuring store-specific capabilities by providing deferred hooks to what will be configured when creating the
|
||||
* {@link org.springframework.data.convert.CustomConversions#CustomConversions(ConverterConfiguration) instance}.
|
||||
*
|
||||
* @param configurer must not be {@literal null}.
|
||||
* @since 2.3
|
||||
*/
|
||||
public static MongoCustomConversions create(Consumer<MongoConverterConfigurationAdapter> configurer) {
|
||||
|
||||
MongoConverterConfigurationAdapter adapter = new MongoConverterConfigurationAdapter();
|
||||
configurer.accept(adapter);
|
||||
|
||||
return new MongoCustomConversions(adapter);
|
||||
}
|
||||
|
||||
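A brief usage sketch of the functional factory above; it only uses create(..), useNativeDriverJavaTimeCodecs() and registerConverter(..) from this change, and the converter class is a made-up placeholder:

import org.springframework.core.convert.converter.Converter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;

class CustomConversionsConfigSketch {

    static MongoCustomConversions conversions() {
        return MongoCustomConversions.create(adapter -> {
            // let the MongoDB driver codecs handle LocalDate/LocalTime/LocalDateTime (UTC based)
            adapter.useNativeDriverJavaTimeCodecs();
            // register application-specific converters on top
            adapter.registerConverter(new StringToIntegerConverter());
        });
    }

    // placeholder converter, purely for illustration
    static class StringToIntegerConverter implements Converter<String, Integer> {
        @Override
        public Integer convert(String source) {
            return Integer.valueOf(source);
        }
    }
}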
@WritingConverter
|
||||
@@ -99,4 +139,181 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
return source != null ? source.toString() : null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoConverterConfigurationAdapter} encapsulates creation of
|
||||
* {@link org.springframework.data.convert.CustomConversions.ConverterConfiguration} with MongoDB specifics.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.3
|
||||
*/
|
||||
public static class MongoConverterConfigurationAdapter {
|
||||
|
||||
/**
|
||||
* List of {@literal java.time} types having different representation when rendered via the native
|
||||
* {@link org.bson.codecs.Codec} than the Spring Data {@link Converter}.
|
||||
*/
|
||||
private static final Set<Class<?>> JAVA_DRIVER_TIME_SIMPLE_TYPES = new HashSet<>(
|
||||
Arrays.asList(LocalDate.class, LocalTime.class, LocalDateTime.class));
|
||||
|
||||
private boolean useNativeDriverJavaTimeCodecs = false;
|
||||
private final List<Object> customConverters = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Create a {@link MongoConverterConfigurationAdapter} using the provided {@code converters} and our own codecs for
|
||||
* JSR-310 types.
|
||||
*
|
||||
* @param converters must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static MongoConverterConfigurationAdapter from(List<?> converters) {
|
||||
|
||||
Assert.notNull(converters, "Converters must not be null");
|
||||
|
||||
MongoConverterConfigurationAdapter converterConfigurationAdapter = new MongoConverterConfigurationAdapter();
|
||||
converterConfigurationAdapter.useSpringDataJavaTimeCodecs();
|
||||
converterConfigurationAdapter.registerConverters(converters);
|
||||
|
||||
return converterConfigurationAdapter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codecs} for
|
||||
* {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
|
||||
* and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
|
||||
*
|
||||
* @param useNativeDriverJavaTimeCodecs
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs(boolean useNativeDriverJavaTimeCodecs) {
|
||||
|
||||
this.useNativeDriverJavaTimeCodecs = useNativeDriverJavaTimeCodecs;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the native MongoDB Java Driver {@link org.bson.codecs.Codec codecs} for
|
||||
* {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
|
||||
* and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
|
||||
*
|
||||
* @return this.
|
||||
* @see #useNativeDriverJavaTimeCodecs(boolean)
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useNativeDriverJavaTimeCodecs() {
|
||||
return useNativeDriverJavaTimeCodecs(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use Spring Data {@link Converter JSR-310 converters} for
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalDateToDateConverter LocalDate},
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalTimeToDateConverter LocalTime} and
|
||||
* {@link org.springframework.data.convert.Jsr310Converters.LocalDateTimeToDateConverter LocalDateTime} using the
|
||||
* {@link ZoneId#systemDefault()}.
|
||||
*
|
||||
* @return this.
|
||||
* @see #useNativeDriverJavaTimeCodecs(boolean)
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter useSpringDataJavaTimeCodecs() {
|
||||
return useNativeDriverJavaTimeCodecs(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom {@link Converter} implementation.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverter(Converter<?, ?> converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
customConverters.add(converter);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom {@link ConverterFactory} implementation.
|
||||
*
|
||||
* @param converterFactory must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverterFactory(ConverterFactory<?, ?> converterFactory) {
|
||||
|
||||
Assert.notNull(converterFactory, "ConverterFactory must not be null!");
|
||||
customConverters.add(converterFactory);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add {@link Converter converters}, {@link ConverterFactory factories}, ...
|
||||
*
|
||||
* @param converters must not be {@literal null} nor contain {@literal null} values.
|
||||
* @return this.
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerConverters(Collection<?> converters) {
|
||||
|
||||
Assert.notNull(converters, "Converters must not be null");
|
||||
Assert.noNullElements(converters, "Converters must not be null nor contain null values!");
|
||||
|
||||
customConverters.addAll(converters);
|
||||
return this;
|
||||
}
|
||||
|
||||
ConverterConfiguration createConverterConfiguration() {
|
||||
|
||||
if (!useNativeDriverJavaTimeCodecs) {
|
||||
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters);
|
||||
}
|
||||
|
||||
/*
|
||||
* We need to have those converters using UTC as the default ones would go on with the systemDefault.
|
||||
*/
|
||||
List<Object> converters = new ArrayList<>(STORE_CONVERTERS.size() + 3);
|
||||
converters.add(DateToUtcLocalDateConverter.INSTANCE);
|
||||
converters.add(DateToUtcLocalTimeConverter.INSTANCE);
|
||||
converters.add(DateToUtcLocalDateTimeConverter.INSTANCE);
|
||||
converters.addAll(STORE_CONVERTERS);
|
||||
|
||||
StoreConversions storeConversions = StoreConversions
|
||||
.of(new SimpleTypeHolder(JAVA_DRIVER_TIME_SIMPLE_TYPES, MongoSimpleTypes.HOLDER), converters);
|
||||
|
||||
return new ConverterConfiguration(storeConversions, this.customConverters, convertiblePair -> {
|
||||
|
||||
// Avoid default registrations
|
||||
|
||||
if (JAVA_DRIVER_TIME_SIMPLE_TYPES.contains(convertiblePair.getSourceType())
|
||||
&& Date.class.isAssignableFrom(convertiblePair.getTargetType())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalDateTime convert(Date source) {
|
||||
return LocalDateTime.ofInstant(Instant.ofEpochMilli(source.getTime()), ZoneId.of("UTC"));
|
||||
}
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalTimeConverter implements Converter<Date, LocalTime> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalTime convert(Date source) {
|
||||
return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalTime();
|
||||
}
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalDateConverter implements Converter<Date, LocalDate> {
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public LocalDate convert(Date source) {
|
||||
return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalDate();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
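The UTC comment inside createConverterConfiguration() is the reason the three Date converters above exist: the default JSR-310 converters resolve against the system time zone, while the driver codecs treat BSON dates as UTC. A standalone illustration of the difference:

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;

class UtcConversionIllustration {
    public static void main(String[] args) {
        Date date = new Date(0L); // epoch: 1970-01-01T00:00:00Z
        // what DateToUtcLocalDateTimeConverter produces: always anchored to UTC
        LocalDateTime utc = LocalDateTime.ofInstant(Instant.ofEpochMilli(date.getTime()), ZoneId.of("UTC"));
        // what the default Jsr310 converters would produce: depends on the JVM time zone
        LocalDateTime system = LocalDateTime.ofInstant(Instant.ofEpochMilli(date.getTime()), ZoneId.systemDefault());
        System.out.println(utc);    // 1970-01-01T00:00
        System.out.println(system); // shifts with the configured zone
    }
}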
@@ -24,7 +24,6 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -175,7 +174,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
Document mappedSort = new Document();
|
||||
for(Map.Entry<String,Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
|
||||
for (Map.Entry<String, Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
|
||||
|
||||
Field field = createPropertyField(entity, entry.getKey(), mappingContext);
|
||||
mappedSort.put(field.getMappedKey(), entry.getValue());
|
||||
@@ -420,7 +419,7 @@ public class QueryMapper {
|
||||
return false;
|
||||
}
|
||||
|
||||
Class<? extends Object> type = value.getClass();
|
||||
Class<?> type = value.getClass();
|
||||
MongoPersistentProperty property = documentField.getProperty();
|
||||
|
||||
if (property.getActualType().isAssignableFrom(type)) {
|
||||
@@ -444,7 +443,7 @@ public class QueryMapper {
|
||||
protected Object convertSimpleOrDocument(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (source instanceof Example) {
|
||||
return exampleMapper.getMappedExample((Example) source, entity);
|
||||
return exampleMapper.getMappedExample((Example<?>) source, entity);
|
||||
}
|
||||
|
||||
if (source instanceof List) {
|
||||
@@ -923,6 +922,8 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class MetadataBackedField extends Field {
|
||||
|
||||
private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");
|
||||
private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+");
|
||||
private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!";
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
@@ -964,7 +965,7 @@ public class QueryMapper {
|
||||
this.entity = entity;
|
||||
this.mappingContext = context;
|
||||
|
||||
this.path = getPath(name);
|
||||
this.path = getPath(removePlaceholders(POSITIONAL_PARAMETER_PATTERN, name));
|
||||
this.property = path == null ? property : path.getLeafProperty();
|
||||
this.association = findAssociation();
|
||||
}
|
||||
@@ -1072,7 +1073,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link PersistentPropertyPath} for the given <code>pathExpression</code>.
|
||||
* Returns the {@link PersistentPropertyPath} for the given {@code pathExpression}.
|
||||
*
|
||||
* @param pathExpression
|
||||
* @return
|
||||
@@ -1080,8 +1081,8 @@ public class QueryMapper {
|
||||
@Nullable
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "") //
|
||||
.replaceAll(POSITIONAL_OPERATOR.pattern(), "");
|
||||
String rawPath = removePlaceholders(POSITIONAL_OPERATOR,
|
||||
removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));
|
||||
|
||||
PropertyPath path = forName(rawPath);
|
||||
if (path == null || isPathToJavaLangClassProperty(path)) {
|
||||
@@ -1158,7 +1159,7 @@ public class QueryMapper {
|
||||
* @return
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(name, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1169,7 +1170,15 @@ public class QueryMapper {
|
||||
* @since 1.7
|
||||
*/
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new AssociationConverter(getAssociation());
|
||||
return new AssociationConverter(name, getAssociation());
|
||||
}
|
||||
|
||||
protected MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() {
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
private static String removePlaceholders(Pattern pattern, String raw) {
|
||||
return pattern.matcher(raw).replaceAll("");
|
||||
}
|
||||
|
||||
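A standalone illustration (not part of the change) of what the two patterns strip from keys before property-path resolution:

import java.util.regex.Pattern;

class PlaceholderStripIllustration {
    public static void main(String[] args) {
        Pattern positional = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+");
        Pattern dotPositional = Pattern.compile("\\.\\d+");
        System.out.println(positional.matcher("grades.$[element].score").replaceAll(""));  // grades.score
        System.out.println(positional.matcher("grades.$.score").replaceAll(""));           // grades.score
        System.out.println(dotPositional.matcher("grades.0.score").replaceAll(""));        // grades.score
    }
}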
/**
|
||||
@@ -1180,8 +1189,9 @@ public class QueryMapper {
|
||||
|
||||
private final KeyMapper keyMapper;
|
||||
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey) {
|
||||
this.keyMapper = new KeyMapper(rawKey);
|
||||
public PositionParameterRetainingPropertyKeyConverter(String rawKey,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> ctx) {
|
||||
this.keyMapper = new KeyMapper(rawKey, ctx);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1223,7 +1233,8 @@ public class QueryMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
|
||||
public KeyMapper(String key) {
|
||||
public KeyMapper(String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
@@ -1243,6 +1254,7 @@ public class QueryMapper {
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
|
||||
if (isPositional) {
|
||||
@@ -1255,7 +1267,7 @@ public class QueryMapper {
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
private static boolean isPositionalParameter(String partial) {
|
||||
static boolean isPositionalParameter(String partial) {
|
||||
|
||||
if ("$".equals(partial)) {
|
||||
return true;
|
||||
@@ -1283,6 +1295,7 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class AssociationConverter implements Converter<MongoPersistentProperty, String> {
|
||||
|
||||
private final String name;
|
||||
private final MongoPersistentProperty property;
|
||||
private boolean associationFound;
|
||||
|
||||
@@ -1291,10 +1304,11 @@ public class QueryMapper {
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public AssociationConverter(Association<MongoPersistentProperty> association) {
|
||||
public AssociationConverter(String name, Association<MongoPersistentProperty> association) {
|
||||
|
||||
Assert.notNull(association, "Association must not be null!");
|
||||
this.property = association.getInverse();
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -1312,6 +1326,12 @@ public class QueryMapper {
|
||||
associationFound = true;
|
||||
}
|
||||
|
||||
if (associationFound) {
|
||||
if (name.endsWith("$") && property.isCollectionLike()) {
|
||||
return source.getFieldName() + ".$";
|
||||
}
|
||||
}
|
||||
|
||||
return source.getFieldName();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -272,6 +272,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class MetadataBackedUpdateField extends MetadataBackedField {
|
||||
|
||||
@@ -289,7 +290,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), entity, mappingContext);
|
||||
super(key, entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
@@ -308,7 +309,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getPropertyConverter() {
|
||||
return new PositionParameterRetainingPropertyKeyConverter(key);
|
||||
return new PositionParameterRetainingPropertyKeyConverter(key, getMappingContext());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -317,7 +318,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Converter<MongoPersistentProperty, String> getAssociationConverter() {
|
||||
return new UpdateAssociationConverter(getAssociation(), key);
|
||||
return new UpdateAssociationConverter(getMappingContext(), getAssociation(), key);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -334,10 +335,12 @@ public class UpdateMapper extends QueryMapper {
|
||||
*
|
||||
* @param association must not be {@literal null}.
|
||||
*/
|
||||
public UpdateAssociationConverter(Association<MongoPersistentProperty> association, String key) {
|
||||
public UpdateAssociationConverter(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
Association<MongoPersistentProperty> association, String key) {
|
||||
|
||||
super(association);
|
||||
this.mapper = new KeyMapper(key);
|
||||
super(key, association);
|
||||
this.mapper = new KeyMapper(key, mappingContext);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -19,12 +19,14 @@ import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Internal API to trigger the resolution of properties.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
interface ValueResolver {
|
||||
|
||||
@@ -38,6 +40,6 @@ interface ValueResolver {
|
||||
* @param parent
|
||||
* @return
|
||||
*/
|
||||
Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator,
|
||||
ObjectPath path);
|
||||
@Nullable
|
||||
Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, ObjectPath path);
|
||||
}
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentSkipListSet;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class JustOnceLogger {
|
||||
|
||||
private static final Map<String, Set<String>> KNOWN_LOGS = new ConcurrentHashMap<>();
|
||||
private static final String AUTO_INDEX_CREATION_CONFIG_CHANGE;
|
||||
|
||||
static {
|
||||
AUTO_INDEX_CREATION_CONFIG_CHANGE = "Automatic index creation will be disabled by default as of Spring Data MongoDB 3.x."
|
||||
+ System.lineSeparator()
|
||||
+ "\tPlease use 'MongoMappingContext#setAutoIndexCreation(boolean)' or override 'MongoConfigurationSupport#autoIndexCreation()' to be explicit."
|
||||
+ System.lineSeparator()
|
||||
+ "\tHowever, we recommend setting up indices manually in an application ready block. You may use index derivation there as well."
|
||||
+ System.lineSeparator() + System.lineSeparator() //
|
||||
+ "\t> -----------------------------------------------------------------------------------------"
|
||||
+ System.lineSeparator() //
|
||||
+ "\t> @EventListener(ApplicationReadyEvent.class)" + System.lineSeparator() //
|
||||
+ "\t> public void initIndicesAfterStartup() {" + System.lineSeparator() //
|
||||
+ "\t>" + System.lineSeparator() //
|
||||
+ "\t> IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);" + System.lineSeparator()//
|
||||
+ "\t>" + System.lineSeparator() //
|
||||
+ "\t> IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);"
|
||||
+ System.lineSeparator() //
|
||||
+ "\t> resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);" + System.lineSeparator() //
|
||||
+ "\t> }" + System.lineSeparator() //
|
||||
+ "\t> -----------------------------------------------------------------------------------------"
|
||||
+ System.lineSeparator();
|
||||
}
|
||||
|
||||
static void logWarnIndexCreationConfigurationChange(String loggerName) {
|
||||
warnOnce(loggerName, AUTO_INDEX_CREATION_CONFIG_CHANGE);
|
||||
}
|
||||
|
||||
static void warnOnce(String loggerName, String message) {
|
||||
|
||||
Logger logger = LoggerFactory.getLogger(loggerName);
|
||||
if (!logger.isWarnEnabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!KNOWN_LOGS.containsKey(loggerName)) {
|
||||
|
||||
KNOWN_LOGS.put(loggerName, new ConcurrentSkipListSet<>(Collections.singleton(message)));
|
||||
logger.warn(message);
|
||||
} else {
|
||||
|
||||
Set<String> messages = KNOWN_LOGS.get(loggerName);
|
||||
if (messages.contains(message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
messages.add(message);
|
||||
logger.warn(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -26,7 +26,7 @@ import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextEvent;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
@@ -62,7 +62,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
* {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param indexOperationsProvider must not be {@literal null}.
|
||||
@@ -74,7 +74,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
* {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
@@ -139,8 +139,6 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
|
||||
for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
|
||||
|
||||
JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());
|
||||
|
||||
IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
|
||||
? (IndexDefinitionHolder) indexDefinition
|
||||
: new IndexDefinitionHolder("", indexDefinition, collection);
|
||||
|
||||
@@ -26,7 +26,7 @@ import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
* {@link IndexFilter} implementation for usage with plain {@link DBObject} as well as {@link CriteriaDefinition} filter
|
||||
* {@link IndexFilter} implementation for usage with plain {@link Document} as well as {@link CriteriaDefinition} filter
|
||||
* expressions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -38,7 +38,7 @@ public class PartialIndexFilter implements IndexFilter {
|
||||
private final @NonNull Object filterExpression;
|
||||
|
||||
/**
|
||||
* Create new {@link PartialIndexFilter} for given {@link DBObject filter expression}.
|
||||
* Create new {@link PartialIndexFilter} for given {@link Document filter expression}.
|
||||
*
|
||||
* @param where must not be {@literal null}.
|
||||
* @return
|
||||
|
||||
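A hedged usage sketch; it assumes the PartialIndexFilter.of(..) factory and Index#partial(..) are available in this version:

import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

class PartialIndexSketch {

    static Index adultsOnlyNameIndex() {
        // index only documents matching the filter expression { age : { $gte : 21 } }
        return new Index().on("name", Direction.ASC)
                .partial(PartialIndexFilter.of(Criteria.where("age").gte(21)));
    }
}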
@@ -142,8 +142,6 @@ public class ReactiveMongoPersistentEntityIndexCreator {
|
||||
|
||||
Mono<String> createIndex(IndexDefinitionHolder indexDefinition) {
|
||||
|
||||
JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());
|
||||
|
||||
return operationsProvider.indexOps(indexDefinition.getCollection()).ensureIndex(indexDefinition) //
|
||||
.onErrorResume(ReactiveMongoPersistentEntityIndexCreator::isDataIntegrityViolation,
|
||||
e -> translateException(e, indexDefinition));
|
||||
|
||||
@@ -37,6 +37,7 @@ import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -63,6 +64,8 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
private final @Nullable String collation;
|
||||
private final @Nullable Expression collationExpression;
|
||||
|
||||
private final ShardKey shardKey;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BasicMongoPersistentEntity} with the given {@link TypeInformation}. Will default the
|
||||
* collection name to the entities simple type name.
|
||||
@@ -92,6 +95,27 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
this.collation = null;
|
||||
this.collationExpression = null;
|
||||
}
|
||||
|
||||
this.shardKey = detectShardKey();
|
||||
}
|
||||
|
||||
private ShardKey detectShardKey() {
|
||||
|
||||
if (!isAnnotationPresent(Sharded.class)) {
|
||||
return ShardKey.none();
|
||||
}
|
||||
|
||||
Sharded sharded = getRequiredAnnotation(Sharded.class);
|
||||
|
||||
String[] keyProperties = sharded.shardKey();
|
||||
if (ObjectUtils.isEmpty(keyProperties)) {
|
||||
keyProperties = new String[] { "_id" };
|
||||
}
|
||||
|
||||
ShardKey shardKey = ShardingStrategy.HASH.equals(sharded.shardingStrategy()) ? ShardKey.hash(keyProperties)
|
||||
: ShardKey.range(keyProperties);
|
||||
|
||||
return sharded.immutableKey() ? ShardKey.immutable(shardKey) : shardKey;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -160,6 +184,11 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
: null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ShardKey getShardKey() {
|
||||
return shardKey;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.BasicPersistentEntity#verify()
|
||||
|
||||
@@ -79,8 +79,14 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
this.fieldNamingStrategy = fieldNamingStrategy == null ? PropertyNameFieldNamingStrategy.INSTANCE
|
||||
: fieldNamingStrategy;
|
||||
|
||||
if (isIdProperty() && getFieldName() != ID_FIELD_NAME) {
|
||||
LOG.warn("Customizing field name for id property not allowed! Custom name will not be considered!");
|
||||
if (isIdProperty() && hasExplicitFieldName()) {
|
||||
|
||||
String annotatedName = getAnnotatedFieldName();
|
||||
if (!ID_FIELD_NAME.equals(annotatedName)) {
|
||||
LOG.warn(
|
||||
"Customizing field name for id property '{}.{}' is not allowed! Custom name ('{}') will not be considered!",
|
||||
owner.getName(), getName(), annotatedName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
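For illustration, a hypothetical mapping that triggers the refined warning above: the explicit field name on an id property is ignored because the id is always stored as _id.

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;

class Account {

    // "identifier" is not honoured for the id property; the warning above is logged instead
    @Id
    @Field("identifier")
    String id;
}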
@@ -167,6 +173,11 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
|
||||
FieldType fieldType = fieldAnnotation.targetType();
|
||||
if (fieldType == FieldType.IMPLICIT) {
|
||||
|
||||
if (isEntity()) {
|
||||
return org.bson.Document.class;
|
||||
}
|
||||
|
||||
return getType();
|
||||
}
|
||||
|
||||
|
||||
@@ -42,8 +42,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
|
||||
private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
|
||||
private @Nullable ApplicationContext context;
|
||||
private boolean autoIndexCreation = true;
|
||||
private boolean autoIndexCreation = false;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoMappingContext}.
|
||||
@@ -99,8 +98,6 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
|
||||
super.setApplicationContext(applicationContext);
|
||||
|
||||
this.context = applicationContext;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -108,7 +105,8 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
* <strong>NOTE:</strong>Index creation should happen at a well-defined time that is ideally controlled by the
|
||||
* application itself.
|
||||
*
|
||||
* @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise.
|
||||
* @return {@literal true} when auto-index creation is enabled; {@literal false} otherwise. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is {@literal false}; it was {@literal true} in 2.x.
|
||||
* @since 2.2
|
||||
* @see org.springframework.data.mongodb.core.index.Indexed
|
||||
*/
|
||||
@@ -121,7 +119,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
* <strong>NOTE:</strong>Index creation should happen at a well-defined time that is ideally controlled by the
|
||||
* application itself.
|
||||
*
|
||||
* @param autoCreateIndexes set to {@literal false} to disable auto-index creation.
|
||||
* @param autoCreateIndexes set to {@literal true} to enable auto-index creation.
|
||||
* @since 2.2
|
||||
* @see org.springframework.data.mongodb.core.index.Indexed
|
||||
*/
|
||||
|
||||
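A minimal sketch of opting back in to the 2.x behaviour, using the MongoMappingContext#setAutoIndexCreation(boolean) hook mentioned in the removed logger message:

import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

class MappingContextSetupSketch {

    static MongoMappingContext mappingContext() {
        MongoMappingContext context = new MongoMappingContext();
        context.setAutoIndexCreation(true); // as of 3.x the default is false
        return context;
    }
}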
@@ -77,4 +77,20 @@ public interface MongoPersistentEntity<T> extends PersistentEntity<T, MongoPersi
|
||||
return getCollation() != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the entity's shard key if defined.
*
* @return {@link ShardKey#none()} if not set.
|
||||
* @since 3.0
|
||||
*/
|
||||
ShardKey getShardKey();
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link #getShardKey() shard key} is sharded.
|
||||
* @since 3.0
|
||||
*/
|
||||
default boolean isSharded() {
|
||||
return getShardKey().isSharded();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.time.Instant;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
@@ -71,6 +72,7 @@ public abstract class MongoSimpleTypes {
|
||||
simpleTypes.add(Pattern.class);
|
||||
simpleTypes.add(Symbol.class);
|
||||
simpleTypes.add(UUID.class);
|
||||
simpleTypes.add(Instant.class);
|
||||
|
||||
simpleTypes.add(BsonBinary.class);
|
||||
simpleTypes.add(BsonBoolean.class);
|
||||
|
||||
@@ -0,0 +1,148 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Value object representing an entity's <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/">Shard
|
||||
* Key</a> used to distribute documents across a sharded MongoDB cluster.
|
||||
* <p />
|
||||
* {@link ShardKey#isImmutable() Immutable} shard keys indicate a fixed value that is not updated (see
* <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
* Reference: Change a Document's Shard Key Value</a>), which allows skipping server round trips in cases where a
|
||||
* potential shard key change might have occurred.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ShardKey {
|
||||
|
||||
private static final ShardKey NONE = new ShardKey(Collections.emptyList(), null, true);
|
||||
|
||||
private final List<String> propertyNames;
|
||||
private final @Nullable ShardingStrategy shardingStrategy;
|
||||
private final boolean immutable;
|
||||
|
||||
private ShardKey(List<String> propertyNames, @Nullable ShardingStrategy shardingStrategy, boolean immutable) {
|
||||
|
||||
this.propertyNames = propertyNames;
|
||||
this.shardingStrategy = shardingStrategy;
|
||||
this.immutable = immutable;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the number of properties used to form the shard key.
|
||||
*/
|
||||
public int size() {
|
||||
return propertyNames.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the unmodifiable collection of property names forming the shard key.
|
||||
*/
|
||||
public Collection<String> getPropertyNames() {
|
||||
return propertyNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the shard key of a document does not change.
|
||||
* @see <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
|
||||
* Reference: Change a Document's Shard Key Value</a>
|
||||
*/
|
||||
public boolean isImmutable() {
|
||||
return immutable;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return whether sharding is defined for the entity, i.e. whether this shard key contains at least one property.
|
||||
*
|
||||
* @return {@literal true} if the key is sharded; {@literal false} otherwise.
|
||||
*/
|
||||
public boolean isSharded() {
|
||||
return !propertyNames.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw MongoDB representation of the {@link ShardKey}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Document getDocument() {
|
||||
|
||||
Document doc = new Document();
|
||||
for (String field : propertyNames) {
|
||||
doc.append(field, shardingValue());
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
|
||||
private Object shardingValue() {
|
||||
return ObjectUtils.nullSafeEquals(ShardingStrategy.HASH, shardingStrategy) ? "hash" : 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ShardKey} indicating no shard key has been defined.
|
||||
*
|
||||
* @return {@link #NONE}
|
||||
*/
|
||||
public static ShardKey none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ShardingStrategy#RANGE} shard key.
|
||||
*
|
||||
* @param propertyNames must not be {@literal null}.
|
||||
* @return new instance of {@link ShardKey}.
|
||||
*/
|
||||
public static ShardKey range(String... propertyNames) {
|
||||
return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.RANGE, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ShardingStrategy#HASH} shard key.
|
||||
*
|
||||
* @param propertyNames must not be {@literal null}.
|
||||
* @return new instance of {@link ShardKey}.
|
||||
*/
|
||||
public static ShardKey hash(String... propertyNames) {
|
||||
return new ShardKey(Arrays.asList(propertyNames), ShardingStrategy.HASH, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Turn the given {@link ShardKey} into an {@link #isImmutable() immutable} one.
|
||||
*
|
||||
* @param shardKey must not be {@literal null}.
|
||||
* @return new instance of {@link ShardKey} if the given shard key is not already immutable.
|
||||
*/
|
||||
public static ShardKey immutable(ShardKey shardKey) {
|
||||
|
||||
if (shardKey.isImmutable()) {
|
||||
return shardKey;
|
||||
}
|
||||
|
||||
return new ShardKey(shardKey.propertyNames, shardKey.shardingStrategy, true);
|
||||
}
|
||||
}
|
||||
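A small sketch of how the value object behaves, derived directly from the code above (the property names are arbitrary examples):

import org.bson.Document;
import org.springframework.data.mongodb.core.mapping.ShardKey;

class ShardKeySketch {
    public static void main(String[] args) {
        ShardKey key = ShardKey.range("country", "userId");

        boolean sharded = key.isSharded();          // true - at least one property present
        boolean none = ShardKey.none().isSharded(); // false

        // raw MongoDB representation used for range sharding: { "country" : 1, "userId" : 1 }
        Document document = key.getDocument();
        System.out.println(sharded + " " + none + " " + document.toJson());

        // immutable keys allow skipping shard-key lookups on save/update
        ShardKey fixed = ShardKey.immutable(key);
        System.out.println(fixed.isImmutable()); // true
    }
}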
@@ -0,0 +1,95 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
|
||||
/**
|
||||
* The {@link Sharded} annotation provides meta information about the actual distribution of data. The
|
||||
* {@link #shardKey()} is used to distribute documents across shards. <br />
|
||||
* Please see the <a href="https://docs.mongodb.com/manual/sharding/">MongoDB Documentation</a> for more information
|
||||
* about requirements and limitations of sharding.
|
||||
* <p/>
|
||||
* Spring Data adds the shard key to filter queries used for
|
||||
* {@link com.mongodb.client.MongoCollection#replaceOne(org.bson.conversions.Bson, Object)} operations triggered by
|
||||
* {@code save} operations on {@link org.springframework.data.mongodb.core.MongoOperations} and
|
||||
* {@link org.springframework.data.mongodb.core.ReactiveMongoOperations} as well as {@code update/upsert} operations
|
||||
* replacing/upserting a single existing document as long as the given
|
||||
* {@link org.springframework.data.mongodb.core.query.UpdateDefinition} holds a full copy of the entity.
|
||||
* <p/>
|
||||
* All other operations that require the presence of the {@literal shard key} in the filter query need to provide the
|
||||
* information via the {@link org.springframework.data.mongodb.core.query.Query} parameter when invoking the method.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
@Persistent
|
||||
@Inherited
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.TYPE, ElementType.ANNOTATION_TYPE })
|
||||
public @interface Sharded {
|
||||
|
||||
/**
|
||||
* Alias for {@link #shardKey()}.
|
||||
*
|
||||
* @return {@literal _id} by default.
|
||||
* @see #shardKey()
|
||||
*/
|
||||
@AliasFor("shardKey")
|
||||
String[] value() default {};
|
||||
|
||||
/**
|
||||
* The shard key determines the distribution of the collection's documents among the cluster's shards. The shard key
|
||||
* is either a single or multiple indexed properties that exist in every document in the collection.
|
||||
* <p/>
|
||||
* By default the {@literal id} property is used for sharding. <br />
|
||||
* <strong>NOTE</strong> Required indexes are not created automatically. Create these either externally, via
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)}
|
||||
* or by annotating your domain model with {@link org.springframework.data.mongodb.core.index.Indexed}/
|
||||
* {@link org.springframework.data.mongodb.core.index.CompoundIndex} along with enabled
|
||||
* {@link org.springframework.data.mongodb.config.MongoConfigurationSupport#autoIndexCreation() auto index creation}.
|
||||
*
|
||||
* @return an empty key by default, which indicates that the entity's {@literal id} property is used.
|
||||
*/
|
||||
@AliasFor("value")
|
||||
String[] shardKey() default {};
|
||||
|
||||
/**
|
||||
* The sharding strategy to use for distributing data across sharded clusters.
|
||||
*
|
||||
* @return {@link ShardingStrategy#RANGE} by default
|
||||
*/
|
||||
ShardingStrategy shardingStrategy() default ShardingStrategy.RANGE;
|
||||
|
||||
/**
|
||||
* As of MongoDB 4.2 it is possible to change a document's shard key via an update. Using immutable shard keys avoids server
* round trips to obtain an entity's actual shard key from the database.
|
||||
*
|
||||
* @return {@literal false} by default.
|
||||
* @see <a href="https://docs.mongodb.com/manual/core/sharding-shard-key/#change-a-document-s-shard-key-value">MongoDB
|
||||
* Reference: Change a Document's Shard Key Value</a>
|
||||
*/
|
||||
boolean immutableKey() default false;
|
||||
|
||||
}
|
||||
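A hedged example of applying the annotation to a domain type; the Customer class and its properties are made up for illustration:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Sharded;
import org.springframework.data.mongodb.core.mapping.ShardingStrategy;

// documents are distributed by a hashed compound key; the matching index on
// { country, userId } still has to be created separately (see the note above)
@Document("customers")
@Sharded(shardKey = { "country", "userId" }, shardingStrategy = ShardingStrategy.HASH)
class Customer {

    @Id String id;
    String country;
    String userId;
}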
@@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public enum ShardingStrategy {
|
||||
|
||||
/**
|
||||
* Ranged sharding involves dividing data into ranges based on the shard key values. Each chunk is then assigned a
|
||||
* range based on the shard key values.
|
||||
*/
|
||||
RANGE,
|
||||
|
||||
/**
|
||||
* Hashed Sharding involves computing a hash of the shard key field’s value. Each chunk is then assigned a range based
|
||||
* on the hashed shard key values.
|
||||
*/
|
||||
HASH
|
||||
}
|
||||
@@ -26,9 +26,11 @@ import org.springframework.data.annotation.ReadOnlyProperty;
|
||||
/**
|
||||
* {@link TextScore} marks the property to be considered as the on server calculated {@literal textScore} when doing
|
||||
* full text search. <br />
|
||||
* <b>NOTE</b> Property will not be written when saving entity.
|
||||
* <b>NOTE</b> The property will not be written when saving the entity and may be {@literal null} if the document is retrieved
* by a regular (i.e. non-{@literal $text}) query.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.6
|
||||
*/
|
||||
@ReadOnlyProperty
|
||||
|
||||
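For illustration, a minimal domain type using the annotation (BlogPost is hypothetical):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.TextIndexed;
import org.springframework.data.mongodb.core.mapping.TextScore;

class BlogPost {

    @Id String id;

    @TextIndexed String body;

    // populated only when the document is loaded via a $text query (e.g. a TextQuery);
    // never written on save, and null for regular finder results
    @TextScore Float score;
}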
@@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.callback.EntityCallback;
|
||||
|
||||
/**
|
||||
* Callback being invoked after a domain object is materialized from a {@link Document} when reading results.
|
||||
*
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
* @see org.springframework.data.mapping.callback.EntityCallbacks
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface AfterConvertCallback<T> extends EntityCallback<T> {
|
||||
|
||||
/**
|
||||
* Entity callback method invoked after a domain object is materialized from a {@link Document}. Can return either the
|
||||
* same or a modified instance of the domain object.
|
||||
*
|
||||
* @param entity the domain object (the result of the conversion).
|
||||
* @param document must not be {@literal null}.
|
||||
* @param collection name of the collection.
|
||||
* @return the domain object that is the result of reading it from the {@link Document}.
|
||||
*/
|
||||
T onAfterConvert(T entity, Document document, String collection);
|
||||
}
|
||||
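A hedged sketch of a callback registered as a Spring bean; Person and its loadedFromCollection field are illustrative only:

import org.bson.Document;
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
import org.springframework.stereotype.Component;

class Person {
    String name;
    String loadedFromCollection; // set by the callback below, illustration only
}

@Component
class PersonAfterConvertCallback implements AfterConvertCallback<Person> {

    @Override
    public Person onAfterConvert(Person entity, Document document, String collection) {
        // post-process the freshly materialized object; the same or a modified instance may be returned
        entity.loadedFromCollection = collection;
        return entity;
    }
}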
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.callback.EntityCallback;
|
||||
|
||||
/**
|
||||
* Entity callback triggered after save of a {@link Document}.
|
||||
*
|
||||
* @author Roman Puchkovskiy
|
||||
* @since 3.0
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface AfterSaveCallback<T> extends EntityCallback<T> {
|
||||
|
||||
/**
|
||||
* Entity callback method invoked after a domain object is saved. Can return either the same or a modified instance of
|
||||
* the domain object.
|
||||
*
|
||||
* @param entity the domain object that was saved.
|
||||
* @param document {@link Document} representing the {@code entity}.
|
||||
* @param collection name of the collection.
|
||||
* @return the domain object that was persisted.
|
||||
*/
|
||||
T onAfterSave(T entity, Document document, String collection);
|
||||
}
|
||||
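A programmatic registration sketch; it assumes the EntityCallbacks.create(EntityCallback...) factory from Spring Data Commons, and Order is a made-up type:

import org.bson.Document;
import org.springframework.data.mapping.callback.EntityCallbacks;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;

class AfterSaveCallbackSketch {

    static class Order {
        String id;
    }

    static void demo() {
        // programmatic registration; inside an ApplicationContext, beans implementing
        // AfterSaveCallback<T> are discovered automatically instead
        EntityCallbacks callbacks = EntityCallbacks.create(
                (AfterSaveCallback<Order>) (entity, document, collection) -> {
                    // e.g. publish a notification or clear transient state after the write
                    return entity;
                });

        Order order = callbacks.callback(AfterSaveCallback.class, new Order(), new Document(), "orders");
        System.out.println(order.id);
    }
}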
Some files were not shown because too many files have changed in this diff.