Compare commits
295 Commits
| SHA1 |
|---|
| ede2f5eeda |
| 49feb3d55e |
| 3e41264aeb |
| dd5b1f7989 |
| f4adc29c60 |
| 85807975e9 |
| f6f9bba2ff |
| 32b992b918 |
| 91c69eed10 |
| 314156a4cd |
| 1f5e2c714d |
| 949b1e596e |
| 46d4206297 |
| 86db5f8265 |
| 5aa4eb058d |
| 71a077a599 |
| 25840d61fc |
| 553912a380 |
| b418fa16e5 |
| 2f627cc02d |
| ba1b769c1a |
| 47747aca8d |
| ce8a97b1e5 |
| a567cb1f7b |
| b42081f05b |
| 9380cfea08 |
| 3466281a8b |
| 26e8b3a8ec |
| b0814d334f |
| 33624fcc6a |
| 7e63897b87 |
| 2c446f5693 |
| e05dffec18 |
| 47dd74b8d6 |
| c7e4673e29 |
| 9d694dbdd5 |
| 74179a9af9 |
| c1c5e3bdbb |
| 24978577cc |
| c43ae822be |
| 881a69864f |
| dc054a793f |
| f048fb6800 |
| 20fdb0e63c |
| 7e3ff86d36 |
| 54c9933535 |
| 1f07a3eb29 |
| 76f4328498 |
| c572ca4e3b |
| f1d214b87d |
| f4854a15f9 |
| b491d6ce81 |
| 0b482f8dcf |
| 9f1b27c305 |
| b6e69606b1 |
| 0429ea69fa |
| 383b7031dd |
| e8f0ed8d59 |
| f90d30472b |
| cec1c8305a |
| cd714b1ba8 |
| 0a83eec8b2 |
| 3a28fb18f6 |
| 6e65d2fd1d |
| 8d63c10e07 |
| aee5f71229 |
| 5cc044b72c |
| 45243468ec |
| 5a162bdbaf |
| 1d15eaf42e |
| 0076e294ae |
| cd8603caa3 |
| aac1c3406b |
| 3a16cc80c5 |
| c2ba361d24 |
| eb5a331822 |
| 3100e0db01 |
| 4efaed4741 |
| 00bbf439c0 |
| 87476f9a91 |
| 7bcf686b40 |
| 8e51d005d5 |
| 1940a5c2c2 |
| 5a274029d7 |
| c342bf266e |
| 11baf455d2 |
| 05882813ac |
| bd3f26c928 |
| 5555aa970b |
| e74fe05abd |
| d579254fbc |
| 71c8e4cc02 |
| a087c7d17c |
| 90cec275a6 |
| e4eefe577d |
| b57a6612f6 |
| 62b2d54e0d |
| aff823da57 |
| d45b630724 |
| fc8c97aeb0 |
| 004e7f01b2 |
| 5c80ee0087 |
| adb9dc29a2 |
| 6eb6feadbb |
| 166aab39c4 |
| fa94c22c2a |
| e0f88a8b84 |
| 59aa8051d3 |
| 205a06e79a |
| 899b43a29b |
| 0f0a4ed31b |
| 9acc8d5268 |
| 313ffb5426 |
| dc859953f4 |
| bc29f2b24e |
| 686cdac73f |
| b7b339577b |
| 2166a6e953 |
| 3c601a699a |
| 37211fc6d7 |
| a45c9040c4 |
| 23c0a07b93 |
| f3a7d6a20e |
| 0d22d831f8 |
| 6b0e2ab5de |
| 5d02b84856 |
| 93e911985e |
| e7faa1a1ec |
| 631714941a |
| db9428cebe |
| 4be53ac952 |
| 564acd75d5 |
| 95ccdf4c20 |
| 291ef4bb75 |
| c7461928f4 |
| f5a5d3e96b |
| b213aada80 |
| 403e5043cb |
| bdbda459c0 |
| 0bf6d5f7fa |
| f2ae14206a |
| 049159374d |
| 79f8e06fc1 |
| 370db2dce5 |
| 74325d5193 |
| e6ea2e1379 |
| cb85f3cfa6 |
| aff8b89006 |
| 0ad8857368 |
| 46de82fe0b |
| 387348b615 |
| 8fd41faac6 |
| 8a15e1086b |
| 8502786648 |
| d7107d49bf |
| f42cb1e2f0 |
| a9403b526f |
| 5f6291ed32 |
| 676ee80434 |
| b54641ff86 |
| 6930c720ca |
| 611cfe9c11 |
| 507a1fbf34 |
| 087649de35 |
| 1f01f34377 |
| 295c43c6ff |
| 5a62d449bf |
| 1cbbe692b5 |
| 5bfe125160 |
| 1b6722324e |
| a212f5f79d |
| 2879348d4b |
| 10097311c7 |
| b8303a56b6 |
| f9e468aebb |
| b900dc6c09 |
| bede55714c |
| 3ec426352f |
| c6293e0ebd |
| 74e49a2326 |
| 69c451f69f |
| 9af8160e05 |
| fdf4ea1e60 |
| 8c7afe012f |
| 6ba258a1f3 |
| 059c8cf1dd |
| 2b8955f583 |
| 23fde167f6 |
| 9470f82e9b |
| 1e88e241d4 |
| 0b8396c43c |
| b602e4cb26 |
| 500393e596 |
| 7e4cbdb8b0 |
| 1d6d8ff8e6 |
| 8ea4cbe9ea |
| 45a0c36184 |
| 599c79bce2 |
| eda6d40aa7 |
| 22b844c87f |
| bdf7ec7c9b |
| 13db06d345 |
| 365ecd53c4 |
| dc40c42815 |
| 49415efb8c |
| dc234906f4 |
| a7f51a7c85 |
| 9b0bd11d09 |
| d7ad883f69 |
| 44308bfbe1 |
| 9b673d342f |
| 5517198310 |
| 819a04f3db |
| f7202067a5 |
| f20a0f20c9 |
| 02216d5941 |
| 79f2094322 |
| afbc5cfa25 |
| a3882a5e5c |
| 8194772388 |
| 12f18850dc |
| 816c1da248 |
| 5a78f19781 |
| 698837921b |
| 0f7fc7880b |
| 6e42f49b08 |
| bdfe4e99ed |
| 85aa3927a6 |
| 33c4e4294f |
| a89ab387cc |
| e52b8c9d38 |
| 4dbf4795db |
| 8e4c6f68ae |
| fddbd126ea |
| ee5b26ab1c |
| 01e9a2ed67 |
| 10107c7b81 |
| abe7876086 |
| a759dff5fd |
| 9f8d081ef3 |
| b8f6030441 |
| 267decf189 |
| 3a7492c68d |
| 273088b6a8 |
| 723b481f82 |
| 8a34bc46a2 |
| bb4c16f4cd |
| cf5b7c9763 |
| f4414e98a2 |
| a97bfd2a37 |
| 9fe0f5c984 |
| 718a7ffe8c |
| f7106dc425 |
| 0698f8bcb8 |
| 3effd9ae6f |
| 7002cd1456 |
| a15d488657 |
| 44651581b1 |
| 6d64f5b2b2 |
| 0c52a29ba8 |
| bd8bd4f568 |
| c75f29dc42 |
| e493af7266 |
| 8d892e5924 |
| 053299f243 |
| 872659cc00 |
| 96978a6194 |
| 2253d3e301 |
| 5982ee84f7 |
| dd2af6462d |
| 622643bf24 |
| 51cc55baac |
| 0b106e5649 |
| 8975d93ab3 |
| e25b6c49f5 |
| 7a70c205de |
| 6045efa450 |
| 7b0816b3ee |
| 14e4ea736d |
| 32e7d9ab7f |
| 7f35ad9e45 |
| 60228f6e5a |
| 7604492b7f |
| 4680fe0e77 |
| b4228c88d3 |
| f6ef8c94c8 |
| 0d0dafa85e |
| 29aa34619f |
| 7f19f769c4 |
| a40e89d90a |
| 6b2350200a |
| fb50b0f6e7 |
| ab568229b5 |
| 7f9c1bd774 |
| 670a0978da |
.travis.yml (40 lines)
@@ -1,40 +0,0 @@
language: java

before_install:
- mkdir -p downloads
- mkdir -p var/db var/log
- if [[ ! -d downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION} ]] ; then cd downloads && wget https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}.tgz && tar xzf mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}.tgz && cd ..; fi
- downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongod --version
- downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongod --dbpath var/db --replSet rs0 --fork --logpath var/log/mongod.log
- sleep 10
- |-
  downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});"
  sleep 15

jdk:
- openjdk13
- openjdk-ea

matrix:
  allow_failures:
  - jdk: openjdk-ea

env:
  matrix:
  - MONGO_VERSION=4.2.0
  - MONGO_VERSION=4.0.14
  - MONGO_VERSION=3.6.16
  global:
  - PROFILE=ci

sudo: false

cache:
  directories:
  - $HOME/.m2
  - downloads

install: true

script:
- "./mvnw clean dependency:list test -Pjava11 -Dsort -U"
Jenkinsfile (vendored, 147 lines)
@@ -3,7 +3,7 @@ pipeline {

triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
upstream(upstreamProjects: "spring-data-commons/2.1.x", threshold: hudson.model.Result.SUCCESS)
}

options {
@@ -12,94 +12,15 @@ pipeline {
}

stages {
stage("Docker images") {
parallel {
stage('Publish JDK 8 + MongoDB 4.0') {
when {
changeset "ci/openjdk8-mongodb-4.0/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
stage('Publish JDK 8 + MongoDB 4.2') {
when {
changeset "ci/openjdk8-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
stage('Publish JDK 14 + MongoDB 4.2') {
when {
changeset "ci/openjdk14-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

steps {
script {
def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
}
}

stage("test: baseline (jdk8)") {
stage("Test") {
when {
anyOf {
branch 'master'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

stage("Test other configurations") {
when {
anyOf {
branch 'master'
branch '2.1.x'
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
stage("test: mongodb 4.0 (jdk8)") {
stage("test: baseline") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
@@ -111,63 +32,22 @@ pipeline {
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
}
}

stage("test: mongodb 4.2 (jdk8)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

stage("test: baseline (jdk14)") {
agent {
docker {
image 'springci/spring-data-openjdk14-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}

stage('Release to artifactory') {
when {
anyOf {
branch 'master'
branch 'issue/*'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
@@ -188,15 +68,15 @@ pipeline {
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb " +
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}

stage('Publish documentation') {
stage('Release to artifactory with docs') {
when {
branch 'master'
branch '2.1.x'
}
agent {
docker {
@@ -212,11 +92,14 @@ pipeline {
}

steps {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.distribution-repository=temp-private-local " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
README.adoc (141 lines)
@@ -50,11 +50,11 @@ public class MyService {

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {
class ApplicationConfig extends AbstractMongoConfiguration {

  @Override
  public MongoClient mongoClient() {
    return MongoClients.create();
    return new MongoClient();
  }

  @Override
@@ -94,143 +94,6 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
</repository>
----

== Upgrading from 2.x

The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY
|===

.Removed XML Namespace Elements and Attributes:
|===
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.

|===

=== Java Configuration

.Java API changes
|===
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===

.Removed Java API:
|===
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===
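For orientation, the configuration move implied by these tables is sketched below. This is a minimal example assembled from the `ApplicationConfig` snippet earlier in this diff, not an excerpt of the changed files; the database name is a placeholder.

[source,java]
----
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;

// 3.x style: extend AbstractMongoClientConfiguration and expose a
// com.mongodb.client.MongoClient. The 2.x equivalent extended
// AbstractMongoConfiguration and returned `new MongoClient()`.
@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoClientConfiguration {

  @Override
  public MongoClient mongoClient() {
    return MongoClients.create(); // defaults to mongodb://localhost:27017
  }

  @Override
  protected String getDatabaseName() {
    return "database"; // placeholder database name for the sketch
  }
}
----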

=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

  @Override
  public void configureClientSettings(MongoClientSettings.Builder builder) {
    builder.uuidRepresentation(UuidRepresentation.STANDARD);
  }

  // ...
}
----
====
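The same representation can also be applied when a client is created outside of `AbstractMongoClientConfiguration`, directly on the driver's settings builder. The following is a hedged sketch (not part of this diff) that uses only plain MongoDB Java driver API:

[source,java]
----
import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

import org.bson.UuidRepresentation;

class UuidClientBootstrap {

  // Hypothetical bootstrap without Spring: the UUID representation is set on
  // MongoClientSettings before the client is created.
  static MongoClient createClient() {
    MongoClientSettings settings = MongoClientSettings.builder()
        .uuidRepresentation(UuidRepresentation.STANDARD)
        .build();
    return MongoClients.create(settings);
  }
}
----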

== Getting Help

Having trouble with Spring Data? We’d love to help!
@@ -1,9 +0,0 @@
# Security Policy

## Supported Versions

Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.

## Reporting a Vulnerability

Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.
@@ -1,39 +0,0 @@
== Running CI tasks locally

Since Concourse is built on top of Docker, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your `test.sh` script before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what Concourse does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
Next, run the `test.sh` script from inside the container:
+
2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to test the `build.sh` script, do this:

1. `mkdir /tmp/spring-data-mongodb-artifactory`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
+
Next, run the `build.sh` script from inside the container:
+
3. `spring-data-mongodb-github/ci/build.sh`

IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
@@ -1,15 +0,0 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
@@ -1,15 +0,0 @@
FROM adoptopenjdk/openjdk14:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
@@ -1,15 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
echo ${TZ} > /etc/timezone;
RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4

RUN apt-get update ; \
apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ci/openjdk8-mongodb-4.1/Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -1,15 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
pom.xml (47 lines)
@@ -5,7 +5,7 @@

<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.RC2</version>
<version>2.1.22.BUILD-SNAPSHOT</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,20 +15,21 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.3.0.RC2</version>
<version>2.1.22.BUILD-SNAPSHOT</version>
</parent>

<modules>
<module>spring-data-mongodb</module>
<module>spring-data-mongodb-cross-store</module>
<module>spring-data-mongodb-distribution</module>
</modules>

<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.3.0.RC2</springdata.commons>
<mongo>4.0.2</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<springdata.commons>2.1.22.BUILD-SNAPSHOT</springdata.commons>
<mongo>3.8.2</mongo>
<mongo.reactivestreams>1.9.2</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>

@@ -117,35 +118,45 @@
<id>benchmarks</id>
<modules>
<module>spring-data-mongodb</module>
<module>spring-data-mongodb-cross-store</module>
<module>spring-data-mongodb-distribution</module>
<module>spring-data-mongodb-benchmarks</module>
</modules>
</profile>

<profile>
<id>distribute</id>
<build>
<plugins>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
<configuration>
<attributes>
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
<reactor>${reactor}</reactor>
</attributes>
</configuration>
</plugin>
</plugins>
</build>
</profile>

</profiles>

<dependencies>
<!-- MongoDB -->
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver-core</artifactId>
<artifactId>mongo-java-driver</artifactId>
<version>${mongo}</version>
</dependency>
</dependencies>

<repositories>
<repository>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
<id>spring-libs-snapshot</id>
<url>https://repo.spring.io/libs-snapshot</url>
</repository>
</repositories>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>3.0.0.RC2</version>
<version>2.1.22.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -87,7 +87,6 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<useSystemClassLoader>false</useSystemClassLoader>
<testSourceDirectory>${project.build.sourceDirectory}</testSourceDirectory>
<testClassesDirectory>${project.build.outputDirectory}</testClassesDirectory>
<excludes>
spring-data-mongodb-cross-store/aop.xml (new file, 7 lines)
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
spring-data-mongodb-cross-store/pom.xml (new file, 148 lines)
@@ -0,0 +1,148 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">

<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.22.BUILD-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Support</name>

<properties>
<jpa>2.1.1</jpa>
<hibernate>5.2.1.Final</hibernate>
<java-module-name>spring.data.mongodb.cross.store</java-module-name>
<project.root>${basedir}/..</project.root>
</properties>

<dependencies>

<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>

<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>2.1.22.BUILD-SNAPSHOT</version>
</dependency>

<!-- reactive -->
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>

<!-- JPA -->
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>${jpa}</version>
<optional>true</optional>
</dependency>

<!-- For Tests -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>${hibernate}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>1.8.0.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.2.4.Final</version>
<scope>test</scope>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.6</version>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjtools</artifactId>
<version>${aspectj}</version>
</dependency>
</dependencies>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>test-compile</goal>
</goals>
</execution>
</executions>
<configuration>
<outxml>true</outxml>
<aspectLibraries>
<aspectLibrary>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
</build>

</project>
@@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 the original author or authors.
* Copyright 2011-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,15 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.repository;
package org.springframework.data.mongodb.crossstore;

import lombok.Value;
import org.springframework.data.crossstore.ChangeSetBacked;

/**
* @author Christoph Strobl
* @author Thomas Risberg
* @author Oliver Gierke
* @deprecated will be removed without replacement.
*/
@Value
class SumAge {
@Deprecated
public interface DocumentBacked extends ChangeSetBacked {

private Long total;
}
@@ -0,0 +1,214 @@
/*
* Copyright 2011-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.crossstore;

import javax.persistence.EntityManagerFactory;

import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.data.crossstore.ChangeSet;
import org.springframework.data.crossstore.ChangeSetBacked;
import org.springframework.data.crossstore.ChangeSetPersister;
import org.springframework.data.mongodb.core.CollectionCallback;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.util.ClassUtils;

import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.result.DeleteResult;

/**
* @author Thomas Risberg
* @author Oliver Gierke
* @author Alex Vengrovsk
* @author Mark Paluch
* @deprecated will be removed without replacement.
*/
@Deprecated
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {

private static final String ENTITY_CLASS = "_entity_class";
private static final String ENTITY_ID = "_entity_id";
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";

private final Logger log = LoggerFactory.getLogger(getClass());

private MongoTemplate mongoTemplate;
private EntityManagerFactory entityManagerFactory;

public void setMongoTemplate(MongoTemplate mongoTemplate) {
this.mongoTemplate = mongoTemplate;
}

public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
this.entityManagerFactory = entityManagerFactory;
}

/*
* (non-Javadoc)
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
*/
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
throws DataAccessException, NotFoundException {

if (id == null) {
log.debug("Unable to load MongoDB data for null id");
return;
}

String collName = getCollectionNameForEntity(entityClass);

final Document dbk = new Document();
dbk.put(ENTITY_ID, id);
dbk.put(ENTITY_CLASS, entityClass.getName());
if (log.isDebugEnabled()) {
log.debug("Loading MongoDB data for {}", dbk);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
for (Document dbo : collection.find(dbk)) {
String key = (String) dbo.get(ENTITY_FIELD_NAME);
if (log.isDebugEnabled()) {
log.debug("Processing key: {}", key);
}
if (!changeSet.getValues().containsKey(key)) {
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
if (className == null) {
throw new DataIntegrityViolationException(
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
}
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
Object value = mongoTemplate.getConverter().read(clazz, dbo);
if (log.isDebugEnabled()) {
log.debug("Adding to ChangeSet: {}", key);
}
changeSet.set(key, value);
}
}
return null;
}
});
}

/*
* (non-Javadoc)
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
*/
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
if (log.isDebugEnabled()) {
log.debug("getPersistentId called on {}", entity);
}
if (entityManagerFactory == null) {
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
}

return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
}

/*
* (non-Javadoc)
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
*/
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
if (cs == null) {
log.debug("Flush: changeset was null, nothing to flush.");
return 0L;
}

if (log.isDebugEnabled()) {
log.debug("Flush: changeset: {}", cs.getValues());
}

String collName = getCollectionNameForEntity(entity.getClass());
if (mongoTemplate.getCollection(collName) == null) {
mongoTemplate.createCollection(collName);
}

for (String key : cs.getValues().keySet()) {
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
Object value = cs.getValues().get(key);
final Document dbQuery = new Document();
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
dbQuery.put(ENTITY_FIELD_NAME, key);
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
public Document doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
Document id = collection.find(dbQuery).first();
return id;
}
});

if (value == null) {
if (log.isDebugEnabled()) {
log.debug("Flush: removing: {}", dbQuery);
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
DeleteResult dr = collection.deleteMany(dbQuery);
return null;
}
});
} else {
final Document dbDoc = new Document();
dbDoc.putAll(dbQuery);
if (log.isDebugEnabled()) {
log.debug("Flush: saving: {}", dbQuery);
}
mongoTemplate.getConverter().write(value, dbDoc);
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
if (dbId != null) {
dbDoc.put("_id", dbId.get("_id"));
}
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
public Object doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {

if (dbId != null) {
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
} else {

if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
dbDoc.remove("_id");
}
collection.insertOne(dbDoc);
}
return null;
}
});
}
}
}
return 0L;
}

/**
* Returns the collection the given entity type shall be persisted to.
*
* @param entityClass must not be {@literal null}.
* @return
*/
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
return mongoTemplate.getCollectionName(entityClass);
}
}
@@ -0,0 +1,272 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Transient;
|
||||
import javax.persistence.Entity;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
import org.aspectj.lang.reflect.FieldSignature;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
import org.springframework.data.mongodb.crossstore.DocumentBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException;
|
||||
import org.springframework.data.crossstore.HashMapChangeSet;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
/**
|
||||
* Aspect to turn an object annotated with @Document into a persistent document using Mongo.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public aspect MongoDocumentBacking {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class);
|
||||
|
||||
// Aspect shared config
|
||||
private ChangeSetPersister<Object> changeSetPersister;
|
||||
|
||||
public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
|
||||
this.changeSetPersister = changeSetPersister;
|
||||
}
|
||||
|
||||
// ITD to introduce N state to Annotated objects
|
||||
declare parents : (@Entity *) implements DocumentBacked;
|
||||
|
||||
// The annotated fields that will be persisted in MongoDB rather than with JPA
|
||||
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Advise user-defined constructors of ChangeSetBacked objects to create a new
|
||||
// backing ChangeSet
|
||||
// -------------------------------------------------------------------------
|
||||
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
|
||||
execution((DocumentBacked+).new(..)) &&
|
||||
!execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity);
|
||||
|
||||
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
|
||||
execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity) &&
|
||||
args(cs);
|
||||
|
||||
protected pointcut entityFieldGet(DocumentBacked entity) :
|
||||
get(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
!get(* DocumentBacked.*);
|
||||
|
||||
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
|
||||
set(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
args(newVal) &&
|
||||
!set(* DocumentBacked.*);
|
||||
|
||||
// intercept EntityManager.merge calls
|
||||
public pointcut entityManagerMerge(EntityManager em, Object entity) :
|
||||
call(* EntityManager.merge(Object)) &&
|
||||
target(em) &&
|
||||
args(entity);
|
||||
|
||||
// intercept EntityManager.remove calls
|
||||
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
|
||||
// call(* EntityManager.remove(Object)) &&
|
||||
// target(em) &&
|
||||
// args(entity);
|
||||
|
||||
// move changeSet from detached entity to the newly merged persistent object
|
||||
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
|
||||
Object mergedEntity = proceed(em, entity);
|
||||
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
|
||||
((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
|
||||
}
|
||||
return mergedEntity;
|
||||
}
|
||||
|
||||
// clear changeSet from removed entity
|
||||
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
|
||||
// if (entity instanceof DocumentBacked) {
|
||||
// removeChangeSetValues((DocumentBacked)entity);
|
||||
// }
|
||||
// return proceed(em, entity);
|
||||
// }
|
||||
|
||||
private static void removeChangeSetValues(DocumentBacked entity) {
|
||||
LOGGER.debug("Removing all change-set values for " + entity);
|
||||
ChangeSet nulledCs = new HashMapChangeSet();
|
||||
DocumentBacked documentEntity = (DocumentBacked) entity;
|
||||
@SuppressWarnings("unchecked")
|
||||
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
|
||||
try {
|
||||
changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
|
||||
documentEntity.getChangeSet());
|
||||
} catch (DataAccessException e) {
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
for (String key : entity.getChangeSet().getValues().keySet()) {
|
||||
nulledCs.set(key, null);
|
||||
}
|
||||
entity.setChangeSet(nulledCs);
|
||||
}
|
||||
|
||||
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
|
||||
LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
|
||||
// Populate all ITD fields
|
||||
entity.setChangeSet(new HashMapChangeSet());
|
||||
entity.itdChangeSetPersister = changeSetPersister;
|
||||
entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
|
||||
// registerTransactionSynchronization(entity);
|
||||
}
|
||||
|
||||
private static void registerTransactionSynchronization(DocumentBacked entity) {
|
||||
if (TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Adding transaction synchronization for " + entity);
|
||||
}
|
||||
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization already active for " + entity);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization is not active for " + entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// ChangeSet-related mixins
|
||||
// -------------------------------------------------------------------------
|
||||
// Introduced field
|
||||
@Transient
|
||||
private ChangeSet DocumentBacked.changeSet;
|
||||
|
||||
@Transient
|
||||
private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;
|
||||
|
||||
@Transient
|
||||
private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;
|
||||
|
||||
public void DocumentBacked.setChangeSet(ChangeSet cs) {
|
||||
this.changeSet = cs;
|
||||
}
|
||||
|
||||
public ChangeSet DocumentBacked.getChangeSet() {
|
||||
return changeSet;
|
||||
}
|
||||
|
||||
// Flush the entity state to the persistent store
|
||||
public void DocumentBacked.flush() {
|
||||
Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
itdChangeSetPersister.persistState(this, this.changeSet);
|
||||
}
|
||||
|
||||
public Object DocumentBacked.get_persistent_id() {
|
||||
return itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
}
|
||||
|
||||
// lifecycle methods
|
||||
@javax.persistence.PostPersist
|
||||
public void DocumentBacked.itdPostPersist() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PreUpdate
|
||||
public void DocumentBacked.itdPreUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostUpdate
|
||||
public void DocumentBacked.itdPostUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostRemove
|
||||
public void DocumentBacked.itdPostRemove() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
removeChangeSetValues(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostLoad
|
||||
public void DocumentBacked.itdPostLoad() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field reads to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity): entityFieldGet(entity) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
|
||||
if (entity.getChangeSet().getValues().get(propName) == null) {
|
||||
try {
|
||||
this.changeSetPersister
|
||||
.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
}
|
||||
Object fValue = entity.getChangeSet().getValues().get(propName);
|
||||
if (fValue != null) {
|
||||
return fValue;
|
||||
}
|
||||
return proceed(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field writes to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
|
||||
entity.getChangeSet().set(propName, newVal);
|
||||
return proceed(entity, newVal);
|
||||
}
|
||||
|
||||
Field field(JoinPoint joinPoint) {
|
||||
FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
|
||||
return fieldSignature.getField();
|
||||
}
|
||||
}
|
||||
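Taken together, the introduced fields, the JPA lifecycle hooks and the two around advices make a @RelatedDocument field behave like a lazily loaded MongoDB document. A rough usage sketch (the Person/Resume fixtures come from the test sources later in this diff; the sketch itself is illustrative and not part of the change set):

import javax.persistence.EntityManager;

import org.springframework.data.mongodb.crossstore.test.Person;
import org.springframework.data.mongodb.crossstore.test.Resume;

class CrossStoreReadSketch {

	// Loading the JPA entity fires @PostLoad, which registers the ChangeSet-backed
	// transaction synchronization; the first read of the @RelatedDocument field goes
	// through the entityFieldGet advice, which pulls the Resume document from MongoDB
	// into the ChangeSet and returns it instead of the plain Java field value.
	Resume loadResume(EntityManager entityManager) {
		Person person = entityManager.find(Person.class, 1L);
		return person.getResume();
	}
}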
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,27 +13,19 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.test.util;
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Marks a field or method as to be autowired by JUnit's dependency injection facilities for injection of a MongoDB
|
||||
* client instance connected to a replica set. Depends on {@link MongoClientExtension}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see com.mongodb.client.MongoClient
|
||||
* @see com.mongodb.reactivestreams.client.MongoClient
|
||||
* @see Client
|
||||
* @see MongoClientExtension
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Target({ ElementType.FIELD, ElementType.PARAMETER })
|
||||
@Deprecated
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Documented
|
||||
public @interface ReplSetClient {
|
||||
|
||||
@Target({ ElementType.FIELD })
|
||||
public @interface RelatedDocument {
|
||||
}
|
||||
@@ -0,0 +1,5 @@
/**
* Infrastructure for Spring Data's MongoDB cross store support.
*/
package org.springframework.data.mongodb.crossstore;

@@ -0,0 +1,195 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.crossstore.test.Address;
|
||||
import org.springframework.data.mongodb.crossstore.test.Person;
|
||||
import org.springframework.data.mongodb.crossstore.test.Resume;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
/**
|
||||
* Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}).
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml")
|
||||
public class CrossStoreMongoTests {
|
||||
|
||||
@Autowired MongoTemplate mongoTemplate;
|
||||
|
||||
@PersistenceContext EntityManager entityManager;
|
||||
|
||||
@Autowired PlatformTransactionManager transactionManager;
|
||||
TransactionTemplate txTemplate;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
txTemplate = new TransactionTemplate(transactionManager);
|
||||
|
||||
clearData(Person.class);
|
||||
|
||||
Address address = new Address(12, "MAin St.", "Boston", "MA", "02101");
|
||||
|
||||
Resume resume = new Resume();
|
||||
resume.addEducation("Skanstulls High School, 1975");
|
||||
resume.addEducation("Univ. of Stockholm, 1980");
|
||||
resume.addJob("DiMark, DBA, 1990-2000");
|
||||
resume.addJob("VMware, Developer, 2007-");
|
||||
|
||||
final Person person = new Person("Thomas", 20);
|
||||
person.setAddress(address);
|
||||
person.setResume(resume);
|
||||
person.setId(1L);
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.persist(person);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.remove(entityManager.find(Person.class, 1L));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void clearData(Class<?> domainType) {
|
||||
|
||||
String collectionName = mongoTemplate.getCollectionName(domainType);
|
||||
mongoTemplate.dropCollection(collectionName);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testReadJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testUpdatedJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
found.setAge(44);
|
||||
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
|
||||
|
||||
entityManager.merge(found);
|
||||
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
|
||||
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMergeJpaEntityWithMongoDocument() {
|
||||
|
||||
final Person detached = entityManager.find(Person.class, 1L);
|
||||
entityManager.detach(detached);
|
||||
detached.getResume().addJob("TargetRx, Developer, 2000-2005");
|
||||
|
||||
Person merged = txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person result = entityManager.merge(detached);
|
||||
entityManager.flush();
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
final Person updated = entityManager.find(Person.class, 1L);
|
||||
Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveJpaEntityWithMongoDocument() {
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person p2 = new Person("Thomas", 20);
|
||||
Resume r2 = new Resume();
|
||||
r2.addEducation("Skanstulls High School, 1975");
|
||||
r2.addJob("DiMark, DBA, 1990-2000");
|
||||
p2.setResume(r2);
|
||||
p2.setId(2L);
|
||||
entityManager.persist(p2);
|
||||
Person p3 = new Person("Thomas", 20);
|
||||
Resume r3 = new Resume();
|
||||
r3.addEducation("Univ. of Stockholm, 1980");
|
||||
r3.addJob("VMware, Developer, 2007-");
|
||||
p3.setResume(r3);
|
||||
p3.setId(3L);
|
||||
entityManager.persist(p3);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
final Person found2 = entityManager.find(Person.class, 2L);
|
||||
entityManager.remove(found2);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
boolean weFound3 = false;
|
||||
|
||||
for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) {
|
||||
Assert.assertTrue(!dbo.get("_entity_id").equals(2L));
|
||||
if (dbo.get("_entity_id").equals(3L)) {
|
||||
weFound3 = true;
|
||||
}
|
||||
}
|
||||
Assert.assertTrue(weFound3);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
public class Address {
|
||||
|
||||
private Integer streetNumber;
|
||||
private String streetName;
|
||||
private String city;
|
||||
private String state;
|
||||
private String zip;
|
||||
|
||||
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
|
||||
super();
|
||||
this.streetNumber = streetNumber;
|
||||
this.streetName = streetName;
|
||||
this.city = city;
|
||||
this.state = state;
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
public Integer getStreetNumber() {
|
||||
return streetNumber;
|
||||
}
|
||||
|
||||
public void setStreetNumber(Integer streetNumber) {
|
||||
this.streetNumber = streetNumber;
|
||||
}
|
||||
|
||||
public String getStreetName() {
|
||||
return streetName;
|
||||
}
|
||||
|
||||
public void setStreetName(String streetName) {
|
||||
this.streetName = streetName;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
public void setState(String state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public String getZip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
public void setZip(String zip) {
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
|
||||
private String name;
|
||||
|
||||
private int age;
|
||||
|
||||
private java.util.Date birthDate;
|
||||
|
||||
@RelatedDocument
|
||||
private Address address;
|
||||
|
||||
@RelatedDocument
|
||||
private Resume resume;
|
||||
|
||||
public Person() {
|
||||
}
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
this.birthDate = new java.util.Date();
|
||||
}
|
||||
|
||||
public void birthday() {
|
||||
++age;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
public void setAge(int age) {
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public java.util.Date getBirthDate() {
|
||||
return birthDate;
|
||||
}
|
||||
|
||||
public void setBirthDate(java.util.Date birthDate) {
|
||||
this.birthDate = birthDate;
|
||||
}
|
||||
|
||||
public Resume getResume() {
|
||||
return resume;
|
||||
}
|
||||
|
||||
public void setResume(Resume resume) {
|
||||
this.resume = resume;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document
|
||||
public class Resume {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(Resume.class);
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
|
||||
private String education = "";
|
||||
|
||||
private String jobs = "";
|
||||
|
||||
public String getId() {
|
||||
return id.toString();
|
||||
}
|
||||
|
||||
public String getEducation() {
|
||||
return education;
|
||||
}
|
||||
|
||||
public void addEducation(String education) {
|
||||
LOGGER.debug("Adding education " + education);
|
||||
this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
|
||||
}
|
||||
|
||||
public String getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public void addJob(String job) {
|
||||
LOGGER.debug("Adding job " + job);
|
||||
this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Resume [education=" + education + ", jobs=" + jobs + "]";
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
version="2.0"
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
<persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
<provider>org.hibernate.ejb.HibernatePersistence</provider>
<class>org.springframework.data.mongodb.crossstore.test.Person</class>
<properties>
<property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
<!--value='create' to build a new database on each run; value='update' to modify an existing database; value='create-drop' means the same as 'create' but also drops tables when Hibernate closes; value='validate' makes no changes to the database-->
<property name="hibernate.hbm2ddl.auto" value="update"/>
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
</properties>
</persistence-unit>
</persistence>
@@ -0,0 +1,72 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:tx="http://www.springframework.org/schema/tx"
|
||||
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/jdbc https://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
|
||||
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx-3.0.xsd
|
||||
http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd">
|
||||
|
||||
<context:spring-configured/>
|
||||
|
||||
<context:component-scan base-package="org.springframework.persistence.mongodb.test">
|
||||
<context:exclude-filter expression="org.springframework.stereotype.Controller" type="annotation"/>
|
||||
</context:component-scan>
|
||||
|
||||
<mongo:mapping-converter/>
|
||||
|
||||
<!-- Mongo config -->
|
||||
<bean id="mongoClient" class="org.springframework.data.mongodb.core.MongoClientFactoryBean">
|
||||
<property name="host" value="localhost"/>
|
||||
<property name="port" value="27017"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoDbFactory" class="org.springframework.data.mongodb.core.SimpleMongoDbFactory">
|
||||
<constructor-arg name="mongoClient" ref="mongoClient"/>
|
||||
<constructor-arg name="databaseName" value="database"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
|
||||
<constructor-arg name="mongoConverter" ref="mappingConverter"/>
|
||||
</bean>
|
||||
|
||||
<bean class="org.springframework.data.mongodb.core.MongoExceptionTranslator"/>
|
||||
|
||||
<!-- Mongo aspect config -->
|
||||
<bean class="org.springframework.data.mongodb.crossstore.MongoDocumentBacking"
|
||||
factory-method="aspectOf">
|
||||
<property name="changeSetPersister" ref="mongoChangeSetPersister"/>
|
||||
</bean>
|
||||
<bean id="mongoChangeSetPersister"
|
||||
class="org.springframework.data.mongodb.crossstore.MongoChangeSetPersister">
|
||||
<property name="mongoTemplate" ref="mongoTemplate"/>
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<jdbc:embedded-database id="dataSource" type="HSQL">
|
||||
</jdbc:embedded-database>
|
||||
|
||||
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>
|
||||
|
||||
<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
|
||||
<property name="persistenceUnitName" value="test"/>
|
||||
<property name="dataSource" ref="dataSource"/>
|
||||
<property name="jpaVendorAdapter">
|
||||
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
|
||||
<property name="showSql" value="true"/>
|
||||
<property name="generateDdl" value="true"/>
|
||||
<property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
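The same cross-store wiring can be expressed without XML. A minimal Java-configuration sketch of the two cross-store beans declared above (bean properties and the aspectOf() factory call mirror the XML; the MongoTemplate and EntityManagerFactory beans are assumed to be defined elsewhere):

import javax.persistence.EntityManagerFactory;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.crossstore.MongoChangeSetPersister;
import org.springframework.data.mongodb.crossstore.MongoDocumentBacking;

@Configuration
class CrossStoreConfigSketch {

	@Bean
	MongoChangeSetPersister mongoChangeSetPersister(MongoTemplate mongoTemplate, EntityManagerFactory entityManagerFactory) {
		MongoChangeSetPersister persister = new MongoChangeSetPersister();
		persister.setMongoTemplate(mongoTemplate);
		persister.setEntityManagerFactory(entityManagerFactory);
		return persister;
	}

	@Bean
	MongoDocumentBacking mongoDocumentBacking(MongoChangeSetPersister persister) {
		// AspectJ aspects are singletons obtained via the compiler-generated aspectOf() factory method
		MongoDocumentBacking aspect = MongoDocumentBacking.aspectOf();
		aspect.setChangeSetPersister(persister);
		return aspect;
	}
}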
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d %5p %40.40c:%4L - %m%n</pattern>
</encoder>
</appender>

<!--
<logger name="org.springframework" level="debug" />
-->

<root level="error">
<appender-ref ref="console" />
</root>

</configuration>
@@ -1,6 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -14,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.0.0.RC2</version>
|
||||
<version>2.1.22.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -32,15 +31,8 @@
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.0.0.RC2</version>
|
||||
<version>2.1.22.BUILD-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -65,12 +65,6 @@
|
||||
<artifactId>querydsl-mongodb</artifactId>
|
||||
<version>${querydsl}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -88,14 +82,6 @@
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-sync</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
@@ -103,6 +89,23 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>bson</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
@@ -250,13 +253,6 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.junit-pioneer</groupId>
|
||||
<artifactId>junit-pioneer</artifactId>
|
||||
<version>0.5.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
@@ -268,25 +264,20 @@
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-reactor</artifactId>
|
||||
<optional>true</optional>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -331,7 +322,6 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<useSystemClassLoader>false</useSystemClassLoader>
|
||||
<useFile>false</useFile>
|
||||
<includes>
|
||||
<include>**/*Tests.java</include>
|
||||
@@ -344,6 +334,12 @@
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
@@ -19,9 +19,9 @@ import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.bulk.BulkWriteError;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.BulkWriteError;
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Is thrown when errors occur during bulk operations.
|
||||
@@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException {
|
||||
private final BulkWriteResult result;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}.
|
||||
* Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
|
||||
*
|
||||
* @param message must not be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
public BulkOperationException(String message, MongoBulkWriteException source) {
|
||||
public BulkOperationException(String message, BulkWriteException source) {
|
||||
|
||||
super(message, source);
|
||||
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright 2010-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Exception being thrown in case we cannot connect to a MongoDB instance.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {
|
||||
|
||||
private final UserCredentials credentials;
|
||||
private final @Nullable String database;
|
||||
|
||||
private static final long serialVersionUID = 1172099106475265589L;
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
|
||||
super(msg, cause);
|
||||
this.database = null;
|
||||
this.credentials = UserCredentials.NO_CREDENTIALS;
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg) {
|
||||
this(msg, null, UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) {
|
||||
super(msg);
|
||||
this.database = database;
|
||||
this.credentials = credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public UserCredentials getCredentials() {
|
||||
return this.credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the database trying to be accessed.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabase() {
|
||||
return database;
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} from the underlying factory.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDatabaseFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -27,7 +27,7 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
* Helper class for managing {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
@@ -41,94 +41,93 @@ import com.mongodb.client.MongoDatabase;
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}.
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory} using
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDbFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory) {
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory}.
* Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDbFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory,
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
}
|
||||
|
||||
MongoDatabaseFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase();
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active
|
||||
* transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory
|
||||
* resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active
|
||||
* transaction}.
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) {
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
@@ -139,8 +138,7 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDatabaseFactory dbFactory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
@@ -171,7 +169,7 @@ public class MongoDatabaseUtils {
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) {
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
@@ -186,7 +184,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) {
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
|
||||
@@ -15,8 +15,14 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -25,33 +31,92 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
MongoDatabase getDb() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
* Creates a {@link DB} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
MongoDatabase getDb(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the legacy database entry point. Please consider {@link #getDb()} instead.
|
||||
*
|
||||
* @return
|
||||
* @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
|
||||
* with the legacy MongoDB driver.
|
||||
*/
|
||||
@Deprecated
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
DB getLegacyDb();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getDb().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDbFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDbFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,15 +36,15 @@ import com.mongodb.client.ClientSession;
|
||||
class MongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private MongoDatabaseFactory dbFactory;
|
||||
private MongoDbFactory dbFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
|
||||
* @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
*/
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) {
|
||||
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.dbFactory = dbFactory;
|
||||
@@ -75,9 +75,9 @@ class MongoResourceHolder extends ResourceHolderSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link MongoDatabaseFactory}.
|
||||
* @return the associated {@link MongoDbFactory}.
|
||||
*/
|
||||
public MongoDatabaseFactory getDbFactory() {
|
||||
public MongoDbFactory getDbFactory() {
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
|
||||
@@ -36,18 +36,17 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
|
||||
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
@@ -59,46 +58,46 @@ import com.mongodb.client.ClientSession;
|
||||
* @currentRead Shadow's Edge - Brent Weeks
|
||||
* @since 2.1
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
|
||||
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
|
||||
*/
|
||||
public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
implements ResourceTransactionManager, InitializingBean {
|
||||
|
||||
private @Nullable MongoDatabaseFactory dbFactory;
|
||||
private @Nullable MongoDbFactory dbFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
/**
* Create a new {@link MongoTransactionManager} for bean-style usage.
* <p />
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
* <strong>Note:</strong>The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
* {@link org.springframework.beans.factory.BeanFactory}.
* <p />
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
* @see #setDbFactory(MongoDatabaseFactory)
* @see #setDbFactory(MongoDbFactory)
* @see #setTransactionSynchronization(int)
*/
public MongoTransactionManager() {}

/**
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}.
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
*
* @param dbFactory must not be {@literal null}.
*/
public MongoTransactionManager(MongoDatabaseFactory dbFactory) {
public MongoTransactionManager(MongoDbFactory dbFactory) {
this(dbFactory, null);
}

/**
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}
* applying the given {@link TransactionOptions options}, if present, when starting a new transaction.
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
* given {@link TransactionOptions options}, if present, when starting a new transaction.
*
* @param dbFactory must not be {@literal null}.
* @param options can be {@literal null}.
*/
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {

Assert.notNull(dbFactory, "DbFactory must not be null!");

@@ -296,11 +295,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

/**
* Set the {@link MongoDatabaseFactory} that this instance should manage transactions for.
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
*
* @param dbFactory must not be {@literal null}.
*/
public void setDbFactory(MongoDatabaseFactory dbFactory) {
public void setDbFactory(MongoDbFactory dbFactory) {

Assert.notNull(dbFactory, "DbFactory must not be null!");
this.dbFactory = dbFactory;
@@ -316,12 +315,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

/**
* Get the {@link MongoDatabaseFactory} that this instance manages transactions for.
* Get the {@link MongoDbFactory} that this instance manages transactions for.
*
* @return can be {@literal null}.
*/
@Nullable
public MongoDatabaseFactory getDbFactory() {
public MongoDbFactory getDbFactory() {
return dbFactory;
}

@@ -330,7 +329,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
*/
@Override
public MongoDatabaseFactory getResourceFactory() {
public MongoDbFactory getResourceFactory() {
return getRequiredDbFactory();
}

@@ -345,7 +344,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager

private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {

MongoDatabaseFactory dbFactory = getResourceFactory();
MongoDbFactory dbFactory = getResourceFactory();

MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
@@ -356,7 +355,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
/**
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
*/
private MongoDatabaseFactory getRequiredDbFactory() {
private MongoDbFactory getRequiredDbFactory() {

Assert.state(dbFactory != null,
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");

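Both wiring styles shown in this hunk remain available: the no-arg constructor plus setter for bean-style configuration, and the factory/options constructors for direct use. A minimal configuration sketch, assuming a `MongoDatabaseFactory` bean is defined elsewhere; the class and bean names here are illustrative, not part of the change:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;

import com.mongodb.ReadConcern;
import com.mongodb.TransactionOptions;
import com.mongodb.WriteConcern;

@Configuration
class TransactionManagerConfig {

	// Constructor-style wiring: the factory is mandatory, the options are optional defaults
	// applied when a new transaction is started.
	@Bean
	MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {

		TransactionOptions options = TransactionOptions.builder()
				.readConcern(ReadConcern.MAJORITY)
				.writeConcern(WriteConcern.MAJORITY)
				.build();

		return new MongoTransactionManager(dbFactory, options);
	}
}
```

With such a bean in place, `@Transactional` methods that go through `MongoTemplate` participate in MongoDB transactions without further setup.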
@@ -13,6 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb;

import reactor.core.publisher.Mono;
@@ -31,7 +32,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Mathieu Ouellet
* @since 2.0
*/
public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
@@ -42,16 +42,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
* @return
* @throws DataAccessException
*/
Mono<MongoDatabase> getMongoDatabase() throws DataAccessException;
MongoDatabase getMongoDatabase() throws DataAccessException;

/**
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
* Creates a {@link MongoDatabase} instance to access the database with the given name.
*
* @param dbName must not be {@literal null} or empty.
* @return
* @throws DataAccessException
*/
Mono<MongoDatabase> getMongoDatabase(String dbName) throws DataAccessException;
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;

/**
* Exposes a shared {@link MongoExceptionTranslator}.
@@ -65,7 +65,10 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
*
* @return never {@literal null}.
*/
CodecRegistry getCodecRegistry();
@Override
default CodecRegistry getCodecRegistry() {
return getMongoDatabase().getCodecRegistry();
}

/**
* Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}.
@@ -85,16 +88,4 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
* @since 2.1
*/
ReactiveMongoDatabaseFactory withSession(ClientSession session);

/**
* Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
*
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
* @since 2.2
*/
default boolean isTransactionActive() {
return false;
}
}

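The hunk above changes the interface contract from returning `MongoDatabase` directly to emitting it via `Mono<MongoDatabase>`, which lets the factory defer database resolution until subscription time. A small usage sketch against the `Mono`-returning signature, assuming a locally running MongoDB and the stock `SimpleReactiveMongoDatabaseFactory` implementation; connection string and database name are placeholders:

```java
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;

import com.mongodb.reactivestreams.client.MongoClients;

import reactor.core.publisher.Flux;

public class ReactiveFactoryExample {

	public static void main(String[] args) {

		ReactiveMongoDatabaseFactory factory =
				new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://localhost"), "test");

		// The database handle is resolved lazily, so session/transaction context
		// can still be attached when the pipeline is subscribed.
		Flux<String> collectionNames = factory.getMongoDatabase()
				.flatMapMany(db -> db.listCollectionNames());

		collectionNames.collectList().block().forEach(System.out::println);
	}
}
```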
@@ -1,278 +0,0 @@
/*
* Copyright 2019-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import reactor.core.publisher.Mono;
import reactor.util.context.Context;

import org.springframework.lang.Nullable;
import org.springframework.transaction.NoTransactionException;
import org.springframework.transaction.reactive.ReactiveResourceSynchronization;
import org.springframework.transaction.reactive.TransactionSynchronization;
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
import org.springframework.transaction.support.ResourceHolderSynchronization;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import com.mongodb.ClientSessionOptions;
import com.mongodb.reactivestreams.client.ClientSession;
import com.mongodb.reactivestreams.client.MongoCollection;
import com.mongodb.reactivestreams.client.MongoDatabase;

/**
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
* suitable for transactional usage.
* <p />
* <strong>Note:</strong> Intended for internal usage only.
*
* @author Mark Paluch
* @author Christoph Strobl
* @author Mathieu Ouellet
* @since 2.2
*/
public class ReactiveMongoDatabaseUtils {

/**
* Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
* {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
* {@link ReactiveMongoDatabaseFactory resource} and if the associated
* {@link com.mongodb.reactivestreams.client.ClientSession} has an
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
*
* @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
* @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
*/
public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {

if (databaseFactory.isTransactionActive()) {
return Mono.just(true);
}

return TransactionSynchronizationManager.forCurrentTransaction() //
.map(it -> {

ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
return holder != null && holder.hasActiveTransaction();
}) //
.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
}

/**
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
* <p />
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
}

/**
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
* <p />
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
SessionSynchronization sessionSynchronization) {
return doGetMongoDatabase(null, factory, sessionSynchronization);
}

/**
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
* <p />
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
* @param dbName the name of the {@link MongoDatabase} to get.
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
}

/**
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
* factory}.
* <p />
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
*
* @param dbName the name of the {@link MongoDatabase} to get.
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
*/
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory,
SessionSynchronization sessionSynchronization) {
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
}

private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
SessionSynchronization sessionSynchronization) {

Assert.notNull(factory, "DatabaseFactory must not be null!");

return TransactionSynchronizationManager.forCurrentTransaction()
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
.flatMap(synchronizationManager -> {

return doGetSession(synchronizationManager, factory, sessionSynchronization) //
.flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
}) //
.onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory))
.switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
}

private static Mono<MongoDatabase> getMongoDatabaseOrDefault(@Nullable String dbName,
ReactiveMongoDatabaseFactory factory) {
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
}

private static Mono<ClientSession> doGetSession(TransactionSynchronizationManager synchronizationManager,
ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) {

final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager
.getResource(dbFactory);

// check for native MongoDB transaction
if (registeredHolder != null
&& (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) {

return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession())
: createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent);
}

if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
return Mono.empty();
}

// init a non native MongoDB transaction by registering a MongoSessionSynchronization
return createClientSession(dbFactory).map(session -> {

ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory);
newHolder.getRequiredSession().startTransaction();

synchronizationManager
.registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory));
newHolder.setSynchronizedWithTransaction(true);
synchronizationManager.bindResource(dbFactory, newHolder);

return newHolder.getSession();
});
}

private static Mono<ClientSession> createClientSession(ReactiveMongoDatabaseFactory dbFactory) {
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
}

/**
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
* participating in a non-native MongoDB transaction, such as a R2CBC transaction.
*
* @author Mark Paluch
* @since 2.2
*/
private static class MongoSessionSynchronization
extends ReactiveResourceSynchronization<ReactiveMongoResourceHolder, Object> {

private final ReactiveMongoResourceHolder resourceHolder;

MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager,
ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) {

super(resourceHolder, dbFactory, synchronizationManager);
this.resourceHolder = resourceHolder;
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
*/
@Override
protected boolean shouldReleaseBeforeCompletion() {
return false;
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
*/
@Override
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {

if (isTransactionActive(resourceHolder)) {
return Mono.from(resourceHolder.getRequiredSession().commitTransaction());
}

return Mono.empty();
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
*/
@Override
public Mono<Void> afterCompletion(int status) {

return Mono.defer(() -> {

if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) {

return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) //
.then(super.afterCompletion(status));
}

return super.afterCompletion(status);
});
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
*/
@Override
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {

return Mono.fromRunnable(() -> {
if (resourceHolder.hasActiveSession()) {
resourceHolder.getRequiredSession().close();
}
});
}

private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) {

if (!resourceHolder.hasSession()) {
return false;
}

return resourceHolder.getRequiredSession().hasActiveTransaction();
}
}
}
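The helper above is what `ReactiveMongoTemplate` goes through so that a database handle picks up the `ClientSession` bound to an ongoing transaction. A sketch of calling it directly from application code, assuming a `ReactiveMongoDatabaseFactory` is available; the collection name is a placeholder:

```java
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;

import com.mongodb.reactivestreams.client.MongoDatabase;

import reactor.core.publisher.Mono;

class TransactionAwareAccess {

	// Resolves the database through the utility so that the transaction's
	// ClientSession (if one is active) is attached transparently.
	Mono<Long> countOrders(ReactiveMongoDatabaseFactory factory) {

		Mono<MongoDatabase> database = ReactiveMongoDatabaseUtils.getDatabase(factory,
				SessionSynchronization.ON_ACTUAL_TRANSACTION);

		return database.flatMap(db -> Mono.from(db.getCollection("orders").estimatedDocumentCount()));
	}
}
```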
@@ -1,155 +0,0 @@
/*
* Copyright 2019-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.lang.Nullable;
import org.springframework.transaction.support.ResourceHolderSupport;

import com.mongodb.reactivestreams.client.ClientSession;

/**
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
* instances of this class to the subscriber context.
* <p />
* <strong>Note:</strong> Intended for internal usage only.
*
* @author Mark Paluch
* @author Christoph Strobl
* @since 2.2
* @see ReactiveMongoTransactionManager
* @see ReactiveMongoTemplate
*/
class ReactiveMongoResourceHolder extends ResourceHolderSupport {

private @Nullable ClientSession session;
private ReactiveMongoDatabaseFactory databaseFactory;

/**
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
*
* @param session the associated {@link ClientSession}. Can be {@literal null}.
* @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
*/
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {

this.session = session;
this.databaseFactory = databaseFactory;
}

/**
* @return the associated {@link ClientSession}. Can be {@literal null}.
*/
@Nullable
ClientSession getSession() {
return session;
}

/**
* @return the required associated {@link ClientSession}.
* @throws IllegalStateException if no session is associated.
*/
ClientSession getRequiredSession() {

ClientSession session = getSession();

if (session == null) {
throw new IllegalStateException("No ClientSession associated");
}
return session;
}

/**
* @return the associated {@link ReactiveMongoDatabaseFactory}.
*/
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
return databaseFactory;
}

/**
* Set the {@link ClientSession} to guard.
*
* @param session can be {@literal null}.
*/
public void setSession(@Nullable ClientSession session) {
this.session = session;
}

/**
* @return {@literal true} if session is not {@literal null}.
*/
boolean hasSession() {
return session != null;
}

/**
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
* bound session is returned.
*
* @param session
* @return
*/
@Nullable
public ClientSession setSessionIfAbsent(@Nullable ClientSession session) {

if (!hasSession()) {
setSession(session);
}

return session;
}

/**
* @return {@literal true} if the session is active and has not been closed.
*/
boolean hasActiveSession() {

if (!hasSession()) {
return false;
}

return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
}

/**
* @return {@literal true} if the session has an active transaction.
* @see #hasActiveSession()
*/
boolean hasActiveTransaction() {

if (!hasActiveSession()) {
return false;
}

return getRequiredSession().hasActiveTransaction();
}

/**
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
* that is accessible via {@link ClientSession#getServerSession()}.
*/
boolean hasServerSession() {

try {
return getRequiredSession().getServerSession() != null;
} catch (IllegalStateException serverSessionClosed) {
// ignore
}

return false;
}
}
@@ -1,530 +0,0 @@
/*
* Copyright 2019-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import reactor.core.publisher.Mono;

import org.springframework.beans.factory.InitializingBean;
import org.springframework.lang.Nullable;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionSystemException;
import org.springframework.transaction.reactive.AbstractReactiveTransactionManager;
import org.springframework.transaction.reactive.GenericReactiveTransaction;
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
import org.springframework.transaction.support.SmartTransactionObject;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

import com.mongodb.ClientSessionOptions;
import com.mongodb.MongoException;
import com.mongodb.TransactionOptions;
import com.mongodb.reactivestreams.client.ClientSession;

/**
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
* <p />
* Binds a {@link ClientSession} from the specified
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
* {@link reactor.util.context.Context}.
* <p />
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
* {@link ClientSession#abortTransaction() abort} a transaction.
* <p />
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
* <p />
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
* behavior as outlined in the MongoDB reference manual.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.2
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
*/
public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean {

private @Nullable ReactiveMongoDatabaseFactory databaseFactory;
private @Nullable TransactionOptions options;

/**
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
* <p />
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
* <p />
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
* @see #setDatabaseFactory(ReactiveMongoDatabaseFactory)
*/
public ReactiveMongoTransactionManager() {}

/**
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
* {@link ReactiveMongoDatabaseFactory}.
*
* @param databaseFactory must not be {@literal null}.
*/
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
this(databaseFactory, null);
}

/**
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
* {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when
* starting a new transaction.
*
* @param databaseFactory must not be {@literal null}.
* @param options can be {@literal null}.
*/
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
@Nullable TransactionOptions options) {

Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");

this.databaseFactory = databaseFactory;
this.options = options;
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
*/
@Override
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
throws TransactionException {

ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager
.getResource(getRequiredDatabaseFactory());
return new ReactiveMongoTransactionObject(resourceHolder);
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
*/
@Override
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
return extractMongoTransaction(transaction).hasResourceHolder();
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
*/
@Override
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
TransactionDefinition definition) throws TransactionException {

return Mono.defer(() -> {

ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);

Mono<ReactiveMongoResourceHolder> holder = newResourceHolder(definition,
ClientSessionOptions.builder().causallyConsistent(true).build());

return holder.doOnNext(resourceHolder -> {

mongoTransactionObject.setResourceHolder(resourceHolder);

if (logger.isDebugEnabled()) {
logger.debug(
String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
}

}).doOnNext(resourceHolder -> {

mongoTransactionObject.startTransaction(options);

if (logger.isDebugEnabled()) {
logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
}

})//
.onErrorMap(
ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
debugString(mongoTransactionObject.getSession())), ex))
.doOnSuccess(resourceHolder -> {

synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder);
}).then();
});
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
*/
@Override
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
throws TransactionException {

return Mono.fromSupplier(() -> {

ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
mongoTransactionObject.setResourceHolder(null);

return synchronizationManager.unbindResource(getRequiredDatabaseFactory());
});
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
*/
@Override
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
Object suspendedResources) {
return Mono
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
*/
@Override
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) throws TransactionException {

return Mono.defer(() -> {

ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);

if (logger.isDebugEnabled()) {
logger.debug(String.format("About to commit transaction for session %s.",
debugString(mongoTransactionObject.getSession())));
}

return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> {
return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
debugString(mongoTransactionObject.getSession())), ex);
});
});
}

/**
* Customization hook to perform an actual commit of the given transaction.<br />
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
* {@literal error labels}. <br />
* By default those labels are ignored, nevertheless one might check for
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
* commit.
*
* @param synchronizationManager reactive synchronization manager.
* @param transactionObject never {@literal null}.
*/
protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
ReactiveMongoTransactionObject transactionObject) {
return transactionObject.commitTransaction();
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
*/
@Override
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) {

return Mono.defer(() -> {

ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);

if (logger.isDebugEnabled()) {
logger.debug(String.format("About to abort transaction for session %s.",
debugString(mongoTransactionObject.getSession())));
}

return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> {
return Mono
.error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
debugString(mongoTransactionObject.getSession())), ex));
});
});
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
*/
@Override
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) throws TransactionException {

return Mono.fromRunnable(() -> {
ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status);
transactionObject.getRequiredResourceHolder().setRollbackOnly();
});
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
*/
@Override
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
Object transaction) {

Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
transaction.getClass()));

return Mono.fromRunnable(() -> {
ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction;

// Remove the connection holder from the thread.
synchronizationManager.unbindResource(getRequiredDatabaseFactory());
mongoTransactionObject.getRequiredResourceHolder().clear();

if (logger.isDebugEnabled()) {
logger.debug(String.format("About to release Session %s after transaction.",
debugString(mongoTransactionObject.getSession())));
}

mongoTransactionObject.closeSession();
});
}

/**
* Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for.
*
* @param databaseFactory must not be {@literal null}.
*/
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {

Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
this.databaseFactory = databaseFactory;
}

/**
* Set the {@link TransactionOptions} to be applied when starting transactions.
*
* @param options can be {@literal null}.
*/
public void setOptions(@Nullable TransactionOptions options) {
this.options = options;
}

/**
* Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for.
*
* @return can be {@literal null}.
*/
@Nullable
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
return databaseFactory;
}

/*
* (non-Javadoc)
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
*/
@Override
public void afterPropertiesSet() {
getRequiredDatabaseFactory();
}

private Mono<ReactiveMongoResourceHolder> newResourceHolder(TransactionDefinition definition,
ClientSessionOptions options) {

ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory();

return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory));
}

/**
* @throws IllegalStateException if {@link #databaseFactory} is {@literal null}.
*/
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {

Assert.state(databaseFactory != null,
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");

return databaseFactory;
}

private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) {

Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
transaction.getClass()));

return (ReactiveMongoTransactionObject) transaction;
}

private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) {

Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(),
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
status.getTransaction().getClass()));

return (ReactiveMongoTransactionObject) status.getTransaction();
}

private static String debugString(@Nullable ClientSession session) {

if (session == null) {
return "null";
}

String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
Integer.toHexString(session.hashCode()));

try {
if (session.getServerSession() != null) {
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
debugString += String.format("clusterTime = %s", session.getClusterTime());
} else {
debugString += "id = n/a";
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
debugString += String.format("clusterTime = %s", session.getClusterTime());
}
} catch (RuntimeException e) {
debugString += String.format("error = %s", e.getMessage());
}

debugString += "]";

return debugString;
}

/**
* MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by
* {@link ReactiveMongoTransactionManager}.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 2.2
* @see ReactiveMongoResourceHolder
*/
protected static class ReactiveMongoTransactionObject implements SmartTransactionObject {

private @Nullable ReactiveMongoResourceHolder resourceHolder;

ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) {
this.resourceHolder = resourceHolder;
}

/**
* Set the {@link MongoResourceHolder}.
*
* @param resourceHolder can be {@literal null}.
*/
void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) {
this.resourceHolder = resourceHolder;
}

/**
* @return {@literal true} if a {@link MongoResourceHolder} is set.
*/
final boolean hasResourceHolder() {
return resourceHolder != null;
}

/**
* Start a MongoDB transaction optionally given {@link TransactionOptions}.
*
* @param options can be {@literal null}
*/
void startTransaction(@Nullable TransactionOptions options) {

ClientSession session = getRequiredSession();
if (options != null) {
session.startTransaction(options);
} else {
session.startTransaction();
}
}

/**
* Commit the transaction.
*/
public Mono<Void> commitTransaction() {
return Mono.from(getRequiredSession().commitTransaction());
}

/**
* Rollback (abort) the transaction.
*/
public Mono<Void> abortTransaction() {
return Mono.from(getRequiredSession().abortTransaction());
}

/**
* Close a {@link ClientSession} without regard to its transactional state.
*/
void closeSession() {

ClientSession session = getRequiredSession();
if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
session.close();
}
}

@Nullable
public ClientSession getSession() {
return resourceHolder != null ? resourceHolder.getSession() : null;
}

private ReactiveMongoResourceHolder getRequiredResourceHolder() {

Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
return resourceHolder;
}

private ClientSession getRequiredSession() {

ClientSession session = getSession();
Assert.state(session != null, "A Session is required but it turned out to be null.");
return session;
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
*/
@Override
public boolean isRollbackOnly() {
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
}

/*
* (non-Javadoc)
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
*/
@Override
public void flush() {
throw new UnsupportedOperationException("flush() not supported");
}
}
}
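As the class Javadoc above notes, the manager binds a `ClientSession` to the subscriber context, so application code only has to register it as a bean. A minimal sketch, assuming a `ReactiveMongoDatabaseFactory` bean exists; the `TransactionalOperator` bean is an optional convenience for wrapping arbitrary publishers, and all names here are illustrative:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoTransactionManager;
import org.springframework.transaction.reactive.TransactionalOperator;

@Configuration
class ReactiveTxConfig {

	// Drives ClientSession-based transactions for the given reactive factory.
	@Bean
	ReactiveMongoTransactionManager reactiveTransactionManager(ReactiveMongoDatabaseFactory factory) {
		return new ReactiveMongoTransactionManager(factory);
	}

	// Allows programmatic demarcation without @Transactional.
	@Bean
	TransactionalOperator transactionalOperator(ReactiveMongoTransactionManager txManager) {
		return TransactionalOperator.create(txManager);
	}
}
```

A service can then wrap a pipeline with `transactionalOperator.transactional(publisher)` and the commit/abort paths shown in `doCommit`/`doRollback` above take effect at subscription time.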
@@ -1,77 +0,0 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.util.Version;
import org.springframework.util.StringUtils;

import com.mongodb.MongoDriverInformation;

/**
* Class that exposes the SpringData MongoDB specific information like the current {@link Version} or
* {@link MongoDriverInformation driver information}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class SpringDataMongoDB {

private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);

private static final Version FALLBACK_VERSION = new Version(3);
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
.builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();

/**
* Obtain the SpringData MongoDB specific driver information.
*
* @return never {@literal null}.
*/
public static MongoDriverInformation driverInformation() {
return DRIVER_INFORMATION;
}

/**
* Fetches the "Implementation-Version" manifest attribute from the jar file.
* <p />
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
* version in all environments. In this case the current Major version is returned as a fallback.
*
* @return never {@literal null}.
*/
public static Version version() {

Package pkg = SpringDataMongoDB.class.getPackage();
String versionString = (pkg != null ? pkg.getImplementationVersion() : null);

if (!StringUtils.hasText(versionString)) {

LOGGER.debug("Unable to find Spring Data MongoDB version.");
return FALLBACK_VERSION;
}

try {
return Version.parse(versionString);
} catch (Exception e) {
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
}

return FALLBACK_VERSION;
}

}
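The purpose of `driverInformation()` is to tag client connections with the `spring-data` driver name. A short sketch mirroring what `createMongoClient(...)` does in the configuration class below, assuming settings are built elsewhere:

```java
import org.springframework.data.mongodb.SpringDataMongoDB;

import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class ClientFactory {

	// Passing the driver information registers "spring-data" in the connection
	// metadata reported to the server.
	static MongoClient create(MongoClientSettings settings) {
		return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
	}
}
```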
@@ -17,22 +17,17 @@ package org.springframework.data.mongodb.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.SpringDataMongoDB;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.lang.Nullable;

import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

/**
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
@@ -40,44 +35,40 @@ import com.mongodb.client.MongoClients;
* @author Christoph Strobl
* @since 2.1
* @see MongoConfigurationSupport
* @see AbstractMongoConfiguration
*/
@Configuration(proxyBeanMethods = false)
@Configuration
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {

/**
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
* Override {@link #mongoClientSettings()} to configure connection details.
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
*
* @return never {@literal null}.
* @see #mongoClientSettings()
* @see #configureClientSettings(Builder)
* @return
*/
public MongoClient mongoClient() {
return createMongoClient(mongoClientSettings());
}
public abstract MongoClient mongoClient();

/**
* Creates a {@link MongoTemplate}.
*
* @see #mongoDbFactory()
* @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
* @return
*/
@Bean
public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
return new MongoTemplate(databaseFactory, converter);
public MongoTemplate mongoTemplate() throws Exception {
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
}

/**
* Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
* {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
* instance configured in {@link #mongoClient()}.
*
* @see #mongoClient()
* @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
* @see #mongoTemplate()
* @return
*/
@Bean
public MongoDatabaseFactory mongoDbFactory() {
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
public MongoDbFactory mongoDbFactory() {
return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
}

/**
@@ -100,32 +91,21 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio

/**
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
*
* @see #customConversions()
* @see #mongoMappingContext(MongoCustomConversions)
* @see #mongoMappingContext()
* @see #mongoDbFactory()
* @return
* @throws Exception
*/
@Bean
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
public MappingMongoConverter mappingMongoConverter() throws Exception {

DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
converter.setCustomConversions(customConversions);
converter.setCodecRegistryProvider(databaseFactory);
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
converter.setCustomConversions(customConversions());

return converter;
}

/**
* Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given
* {@link MongoClientSettings}.
*
* @return never {@literal null}.
* @since 3.0
*/
protected MongoClient createMongoClient(MongoClientSettings settings) {
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
}
}

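A concrete configuration class looks the same against either variant of `AbstractMongoClientConfiguration` shown above, since overriding `mongoClient()` is valid in both. A minimal sketch; the database name and connection string are placeholders and would normally be externalized:

```java
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

@Configuration
class AppMongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "app"; // hypothetical database name
	}

	@Override
	public MongoClient mongoClient() {
		// Illustrative connection string; the base class then derives the
		// MongoDatabaseFactory, MongoTemplate and MappingMongoConverter beans from it.
		return MongoClients.create("mongodb://localhost:27017");
	}
}
```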
@@ -0,0 +1,118 @@
/*
* Copyright 2011-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.lang.Nullable;

import com.mongodb.MongoClient;

/**
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
* <p />
* <strong>INFO:</strong>In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
* to {@link AbstractMongoClientConfiguration}.
*
* @author Mark Pollack
* @author Oliver Gierke
* @author Thomas Darimont
* @author Ryan Tenney
* @author Christoph Strobl
* @author Mark Paluch
* @see MongoConfigurationSupport
* @see AbstractMongoClientConfiguration
*/
@Configuration
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {

/**
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
*
* @return
*/
public abstract MongoClient mongoClient();

/**
* Creates a {@link MongoTemplate}.
*
* @return
*/
@Bean
public MongoTemplate mongoTemplate() throws Exception {
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
}

/**
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
* instance configured in {@link #mongoClient()}.
*
* @see #mongoClient()
* @see #mongoTemplate()
* @return
*/
@Bean
public MongoDbFactory mongoDbFactory() {
return new SimpleMongoDbFactory(mongoClient(), getDatabaseName());
}

/**
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
* {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is
* overridden to implement alternate behavior.
*
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
* entities.
* @deprecated use {@link #getMappingBasePackages()} instead.
*/
@Deprecated
@Nullable
protected String getMappingBasePackage() {

Package mappingBasePackage = getClass().getPackage();
return mappingBasePackage == null ? null : mappingBasePackage.getName();
}

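The derivation described above can also be pinned explicitly by overriding the method; a minimal sketch (the package name is purely illustrative):

// Hypothetical override fixing the scan package instead of deriving it from the config class' package.
@Override
protected String getMappingBasePackage() {
    return "com.acme.domain"; // illustrative package
}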

/**
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
*
* @see #customConversions()
* @see #mongoMappingContext()
* @see #mongoDbFactory()
* @return
* @throws Exception
*/
@Bean
public MappingMongoConverter mappingMongoConverter() throws Exception {

DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
converter.setCustomConversions(customConversions());

return converter;
}

}
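A minimal sketch of a concrete configuration based on the class above. Assumptions: the class name, host, port and database name are placeholders; getDatabaseName() is the abstract hook inherited from MongoConfigurationSupport.

// Sketch only; values are placeholders.
@Configuration
class AppConfig extends AbstractMongoConfiguration {

    @Override
    public MongoClient mongoClient() {
        return new MongoClient("localhost", 27017); // legacy com.mongodb.MongoClient, as used by this class
    }

    @Override
    protected String getDatabaseName() {
        return "example-db";
    }
}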
@@ -18,19 +18,13 @@ package org.springframework.data.mongodb.config;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for reactive Spring Data MongoDB configuration using JavaConfig.
|
||||
@@ -40,33 +34,25 @@ import com.mongodb.reactivestreams.client.MongoClients;
|
||||
* @since 2.0
|
||||
* @see MongoConfigurationSupport
|
||||
*/
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
@Configuration
|
||||
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return createReactiveMongoClient(mongoClientSettings());
|
||||
}
|
||||
public abstract MongoClient reactiveMongoClient();
|
||||
|
||||
/**
|
||||
* Creates {@link ReactiveMongoOperations}.
|
||||
*
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MappingMongoConverter mongoConverter) {
|
||||
return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
|
||||
public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -74,7 +60,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
* {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
|
||||
*
|
||||
* @see #reactiveMongoClient()
|
||||
* @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
|
||||
* @see #reactiveMongoTemplate()
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
@@ -84,31 +70,20 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoMappingContext()
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
* Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}.
*
* @return never {@literal null}.
* @since 3.0
*/
protected MongoClient createReactiveMongoClient(MongoClientSettings settings) {
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
}
}

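A minimal sketch against the newer variant shown above, in which reactiveMongoClient() delegates to createReactiveMongoClient(mongoClientSettings()). The class name, connection string and database name are illustrative; configureClientSettings is the hook inherited from MongoConfigurationSupport.

// Sketch only; values are placeholders.
@Configuration
class ReactiveAppConfig extends AbstractReactiveMongoConfiguration {

    @Override
    protected String getDatabaseName() {
        return "example-db";
    }

    @Override
    protected void configureClientSettings(MongoClientSettings.Builder builder) {
        builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017"));
    }
}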
@@ -1,46 +0,0 @@
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.config;

import java.beans.PropertyEditorSupport;

import org.springframework.lang.Nullable;
import org.springframework.util.StringUtils;

import com.mongodb.ConnectionString;

/**
* Parse a {@link String} to a {@link com.mongodb.ConnectionString}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {

/*
* (non-Javadoc)
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
*/
@Override
public void setAsText(@Nullable String connectionString) {

if (!StringUtils.hasText(connectionString)) {
return;
}

setValue(new ConnectionString(connectionString));
}
}

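For reference, a minimal sketch of the text-to-value conversion this editor performs (the URI is a placeholder; in XML config the editor is registered through a CustomEditorConfigurer rather than used directly):

// Illustrative use only.
ConnectionStringPropertyEditor editor = new ConnectionStringPropertyEditor();
editor.setAsText("mongodb://localhost:27017/example-db");
ConnectionString connectionString = (ConnectionString) editor.getValue();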
@@ -51,6 +51,7 @@ import org.springframework.core.type.filter.AssignableTypeFilter;
|
||||
import org.springframework.core.type.filter.TypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
@@ -96,14 +97,13 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;
|
||||
|
||||
String autoIndexCreation = element.getAttribute("auto-index-creation");
|
||||
boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);
|
||||
|
||||
parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));
|
||||
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id);
|
||||
|
||||
createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element);
|
||||
|
||||
// Need a reference to a Mongo instance
|
||||
String dbFactoryRef = element.getAttribute("db-factory-ref");
|
||||
if (!StringUtils.hasText(dbFactoryRef)) {
|
||||
@@ -202,11 +202,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
|
||||
return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
|
||||
}
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {
|
||||
|
||||
String ctxRef = element.getAttribute("mapping-context-ref");
|
||||
|
||||
@@ -234,8 +229,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
|
||||
}
|
||||
|
||||
mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);
|
||||
|
||||
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);
|
||||
|
||||
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
|
||||
@@ -355,6 +348,20 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context,
|
||||
Element element) {
|
||||
|
||||
BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
|
||||
mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);
|
||||
|
||||
BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
|
||||
context.registerBeanComponent(
|
||||
builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME));
|
||||
|
||||
return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches.
|
||||
*
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.springframework.data.config.ParsingUtils.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
@@ -27,33 +26,25 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information
|
||||
* on an entity.
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on
|
||||
* an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getBeanClass(Element element) {
|
||||
return AuditingEntityCallback.class;
|
||||
return AuditingEventListener.class;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -89,24 +80,7 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
|
||||
mappingContextRef);
|
||||
parser.parse(element, parserContext);
|
||||
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element));
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler,
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry,
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition());
|
||||
builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,23 +32,17 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -110,27 +104,12 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEntityCallback.class);
|
||||
.rootBeanDefinition(AuditingEventListener.class);
|
||||
listenerBeanDefinitionBuilder
|
||||
.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -50,11 +50,10 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
|
||||
ParsingUtils.setPropertyValue(builder, element, "port", "port");
|
||||
ParsingUtils.setPropertyValue(builder, element, "host", "host");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credential", "credential");
|
||||
ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet");
|
||||
ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials");
|
||||
|
||||
MongoParsingUtils.parseMongoClientSettings(element, builder);
|
||||
MongoParsingUtils.parseMongoClientOptions(element, builder);
|
||||
MongoParsingUtils.parseReplicaSet(element, builder);
|
||||
|
||||
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;
|
||||
|
||||
@@ -63,34 +62,22 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
|
||||
parserContext.registerBeanComponent(mongoComponent);
|
||||
|
||||
BeanComponentDefinition connectionStringPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(connectionStringPropertyEditor);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder());
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils
|
||||
.getServerAddressPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(serverAddressPropertyEditor);
|
||||
|
||||
BeanComponentDefinition writeConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder());
|
||||
BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils
|
||||
.getWriteConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(writeConcernEditor);
|
||||
|
||||
BeanComponentDefinition readConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder());
|
||||
BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils
|
||||
.getReadPreferencePropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readPreferenceEditor);
|
||||
|
||||
BeanComponentDefinition credentialsEditor = helper
|
||||
.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor());
|
||||
BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils
|
||||
.getMongoCredentialPropertyEditor());
|
||||
parserContext.registerBeanComponent(credentialsEditor);
|
||||
|
||||
BeanComponentDefinition uuidRepresentationEditor = helper
|
||||
.getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder());
|
||||
parserContext.registerBeanComponent(uuidRepresentationEditor);
|
||||
|
||||
parserContext.popAndRegisterContainingComponent();
|
||||
|
||||
return mongoComponent.getBeanDefinition();
|
||||
|
||||
@@ -15,12 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
@@ -28,19 +28,20 @@ import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.context.PersistentEntities;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.support.CachingIsNewStrategyFactory;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB to be extended for JavaConfiguration usage.
|
||||
*
|
||||
@@ -80,44 +81,39 @@ public abstract class MongoConfigurationSupport {
* @throws ClassNotFoundException
*/
@Bean
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
throws ClassNotFoundException {
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {

MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setInitialEntitySet(getInitialEntitySet());
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
mappingContext.setAutoIndexCreation(autoIndexCreation());

return mappingContext;
}

/**
* Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
*
* @return
* @throws ClassNotFoundException
*/
@Bean
public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {

return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(
new PersistentEntities(Arrays.<MappingContext<?, ?>> asList(new MappingContext[] { mongoMappingContext() }))));
}

/**
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
* {@link CustomConversions} will be registered with the
* {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link #mongoMappingContext()}.
* Returns an empty {@link MongoCustomConversions} instance by default.
* <p>
* <strong>NOTE:</strong> Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB
* native simple types and register custom {@link Converter converters}.
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
* {@link #mongoMappingContext()}. Returns an empty {@link MongoCustomConversions} instance by default.
*
* @return must not be {@literal null}.
*/
@Bean
public MongoCustomConversions customConversions() {
return MongoCustomConversions.create(this::configureConverters);
}

/**
* Configuration hook for {@link MongoCustomConversions} creation.
*
* @param converterConfigurationAdapter never {@literal null}.
* @since 2.3
* @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs()
* @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs()
*/
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {

public CustomConversions customConversions() {
return new MongoCustomConversions(Collections.emptyList());
}

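A minimal sketch of the older override style shown just above, registering one custom converter via the MongoCustomConversions list constructor (the String-to-Instant converter is purely illustrative and assumes java.time.Instant plus the Converter type imported above; in the newer variant the same effect is achieved through the configureConverters hook instead):

// Sketch only; the converter is a placeholder.
@Override
public CustomConversions customConversions() {
    Converter<String, Instant> stringToInstant = source -> Instant.parse(source); // illustrative converter
    return new MongoCustomConversions(Collections.singletonList(stringToInstant));
}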
/**
@@ -194,41 +190,4 @@ public abstract class MongoConfigurationSupport {
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
: PropertyNameFieldNamingStrategy.INSTANCE;
}

/**
* Configure whether to automatically create indices for domain types by deriving the
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
*
* @return {@literal false} by default. <br />
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
* @since 2.2
*/
protected boolean autoIndexCreation() {
return false;
}

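For completeness, opting back in to the pre-3.x behaviour described above is a one-line override in a concrete configuration:

// Sketch only.
@Override
protected boolean autoIndexCreation() {
    return true;
}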
/**
* Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. <br />
* Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup.
*
* @return never {@literal null}.
* @since 3.0
*/
protected MongoClientSettings mongoClientSettings() {

MongoClientSettings.Builder builder = MongoClientSettings.builder();
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
configureClientSettings(builder);
return builder.build();
}

/**
* Configure {@link MongoClientSettings} via its {@link Builder} API.
*
* @param builder never {@literal null}.
* @since 3.0
*/
protected void configureClientSettings(MongoClientSettings.Builder builder) {
// customization hook
}
}

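A minimal sketch of the customization hook shown above; the application name is a placeholder, and the UUID representation setting mirrors the builder call used by mongoClientSettings():

// Sketch only; values are placeholders.
@Override
protected void configureClientSettings(MongoClientSettings.Builder builder) {
    builder.applicationName("example-app")
            .uuidRepresentation(UuidRepresentation.STANDARD);
}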
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@@ -27,7 +26,6 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
@@ -80,23 +78,12 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
|
||||
} else if ("MONGODB-CR".equals(authMechanism)) {
|
||||
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
|
||||
Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class,
|
||||
"createMongoCRCredential", String.class, String.class, char[].class);
|
||||
|
||||
if (createCRCredentialMethod == null) {
|
||||
throw new IllegalArgumentException("MONGODB-CR is no longer supported.");
|
||||
}
|
||||
|
||||
MongoCredential credential = MongoCredential.class
|
||||
.cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database,
|
||||
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
credentials.add(credential);
|
||||
|
||||
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
|
||||
@@ -32,12 +32,14 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s.
|
||||
@@ -82,11 +84,10 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(SimpleMongoClientDatabaseFactory.class);
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
BeanDefinition mongoUri = getConnectionString(element, parserContext);
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
|
||||
if (mongoUri != null) {
|
||||
|
||||
@@ -96,8 +97,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-client-ref");
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
// Defaulting
|
||||
@@ -119,8 +119,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the
|
||||
* name under which the {@link com.mongodb.client.MongoClient} instance was registered under.
|
||||
* Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the
|
||||
* {@link Mongo} instance was registered under.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext must not be {@literal null}.
|
||||
@@ -136,7 +136,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes. <br />
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
@@ -145,19 +146,11 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
@Nullable
|
||||
private BeanDefinition getConnectionString(Element element, ParserContext parserContext) {
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
|
||||
String type = null;
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
|
||||
if (element.hasAttribute("client-uri")) {
|
||||
type = "client-uri";
|
||||
} else if (element.hasAttribute("connection-string")) {
|
||||
type = "connection-string";
|
||||
} else if (element.hasAttribute("uri")) {
|
||||
type = "uri";
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(type)) {
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -171,12 +164,16 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class);
|
||||
builder.addConstructorArgValue(element.getAttribute(type));
|
||||
Class<?> type = MongoClientURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -35,7 +35,6 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class MongoParsingUtils {
|
||||
@@ -43,78 +42,59 @@ abstract class MongoParsingUtils {
|
||||
private MongoParsingUtils() {}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* Parses the mongo replica-set element.
|
||||
*
|
||||
* @param parserContext the parser context
|
||||
* @param element the mongo element
|
||||
* @param mongoBuilder the bean definition builder to populate
|
||||
* @return
|
||||
*/
|
||||
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
|
||||
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* @param element must not be {@literal null}.
|
||||
* @param mongoClientBuilder must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.0
|
||||
* @since 1.7
|
||||
*/
|
||||
public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
|
||||
Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings");
|
||||
if (settingsElement == null) {
|
||||
Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options");
|
||||
|
||||
if (optionsElement == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoClientSettingsFactoryBean.class);
|
||||
.genericBeanDefinition(MongoClientOptionsFactoryBean.class);
|
||||
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
|
||||
"threadsAllowedToBlockForConnectionMultiplier");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
|
||||
// SocketSettings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize");
|
||||
|
||||
// Server Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency",
|
||||
"serverHeartbeatFrequencyMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency",
|
||||
"serverMinHeartbeatFrequencyMS");
|
||||
|
||||
// Cluster Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout",
|
||||
"clusterServerSelectionTimeoutMS");
|
||||
|
||||
// Connection Pool Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time",
|
||||
"poolMaxConnectionLifeTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time",
|
||||
"poolMaxConnectionIdleTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay",
|
||||
"poolMaintenanceInitialDelayMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency",
|
||||
"poolMaintenanceFrequencyMS");
|
||||
|
||||
// SSL Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed",
|
||||
"sslInvalidHostNameAllowed");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider");
|
||||
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -136,24 +116,6 @@ abstract class MongoParsingUtils {
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ReadConcernPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* One should only register one bean definition but want to have the convenience of using
|
||||
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
|
||||
@@ -161,7 +123,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() {
|
||||
|
||||
Map<String, String> customEditors = new ManagedMap<>();
|
||||
Map<String, String> customEditors = new ManagedMap<String, String>();
|
||||
customEditors.put("com.mongodb.ServerAddress[]",
|
||||
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
|
||||
|
||||
@@ -179,7 +141,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
|
||||
customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
@@ -205,41 +167,4 @@ abstract class MongoParsingUtils {
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadConcernLevel;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link ReadConcern}. If it is a well know {@link String} as identified by the
|
||||
* {@link ReadConcernLevel#fromString(String)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ReadConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readConcernString) {
|
||||
|
||||
if (!StringUtils.hasText(readConcernString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString)));
|
||||
}
|
||||
}
|
||||
@@ -1,45 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link UuidRepresentation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String value) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(UuidRepresentation.valueOf(value));
|
||||
}
|
||||
}
|
||||
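For reference, the two removed editors above boil down to these conversions (an illustrative fragment, not a complete method; the values are placeholders and mirror the setAsText bodies shown):

// Fragment only.
ReadConcern majority = new ReadConcern(ReadConcernLevel.fromString("majority"));
UuidRepresentation standard = UuidRepresentation.valueOf("STANDARD");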
@@ -34,7 +34,7 @@ import com.mongodb.WriteConcern;
|
||||
public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/**
|
||||
* Parse a string to a {@link WriteConcern}.
|
||||
* Parse a string to a List<ServerAddress>
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String writeConcernString) {
|
||||
@@ -51,5 +51,6 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
// pass on the string to the constructor
|
||||
setValue(new WriteConcern(writeConcernString));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,7 +31,6 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
@@ -136,29 +135,6 @@ public interface BulkOperations {
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
* Add a single replace operation to the bulk operation.
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
default BulkOperations replaceOne(Query query, Object replacement) {
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
}

/**
* Add a single replace operation to the bulk operation.
*
* @param query Update criteria.
* @param replacement the replacement document. Must not be {@literal null}.
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
* @since 2.2
*/
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);

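A hypothetical usage sketch of the replace operations declared above, obtained through MongoOperations#bulkOps; mongoOperations, Person, its constructor and the field values are assumptions for illustration only:

// Sketch only; names and values are placeholders.
BulkOperations bulkOps = mongoOperations.bulkOps(BulkOperations.BulkMode.UNORDERED, Person.class);
bulkOps.replaceOne(Query.query(Criteria.where("firstName").is("Walter")), new Person("Walt", "White"));
BulkWriteResult result = bulkOps.execute();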
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
|
||||
@@ -20,7 +20,6 @@ import lombok.EqualsAndHashCode;
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -85,19 +84,8 @@ public class ChangeStreamEvent<T> {
@Nullable
public Instant getTimestamp() {

return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class)
: null;
}

/**
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time}.
*
* @return can be {@literal null}.
* @since 2.2
*/
@Nullable
public BsonTimestamp getBsonTimestamp() {
return raw != null ? raw.getClusterTime() : null;
return raw != null && raw.getClusterTime() != null
? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) : null;
}
|
||||
/**
|
||||
|
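A hedged sketch of consuming change stream events reactively and reading the cluster-time accessors shown above. The changeStream(collection, options, type) overload on the reactive template and the Person type are assumptions, not part of this diff.

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class ChangeStreamTimestampExample {

	Flux<ChangeStreamEvent<Person>> people(ReactiveMongoTemplate template) {

		return template.changeStream("people", ChangeStreamOptions.empty(), Person.class)
				// getTimestamp() converts the raw cluster time, getBsonTimestamp() returns it as-is
				.doOnNext(event -> System.out.println(event.getTimestamp() + " / " + event.getBsonTimestamp()));
	}
}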
||||
@@ -21,17 +21,12 @@ import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
@@ -52,8 +47,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
@@ -89,31 +83,7 @@ public class ChangeStreamOptions {
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Instant> getResumeTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<BsonTimestamp> getResumeBsonTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isStartAfter() {
|
||||
return Resume.START_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isResumeAfter() {
|
||||
return Resume.RESUME_AFTER.equals(resume);
|
||||
return Optional.ofNullable(resumeTimestamp);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -133,48 +103,6 @@ public class ChangeStreamOptions {
|
||||
return new ChangeStreamOptionsBuilder();
|
||||
}
|
||||
|
||||
private static <T> T asTimestampOfType(Object timestamp, Class<T> targetType) {
|
||||
return targetType.cast(doGetTimestamp(timestamp, targetType));
|
||||
}
|
||||
|
||||
private static <T> Object doGetTimestamp(Object timestamp, Class<T> targetType) {
|
||||
|
||||
if (ClassUtils.isAssignableValue(targetType, timestamp)) {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Resume {
|
||||
|
||||
UNDEFINED,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument)
|
||||
*/
|
||||
START_AFTER,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument)
|
||||
*/
|
||||
RESUME_AFTER
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
@@ -187,8 +115,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
@@ -256,11 +183,6 @@ public class ChangeStreamOptions {
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
if (this.resume == Resume.UNDEFINED) {
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -302,51 +224,6 @@ public class ChangeStreamOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to continue emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to start emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.START_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
@@ -354,12 +231,11 @@ public class ChangeStreamOptions {
|
||||
|
||||
ChangeStreamOptions options = new ChangeStreamOptions();
|
||||
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
options.filter = filter;
|
||||
options.resumeToken = resumeToken;
|
||||
options.fullDocumentLookup = fullDocumentLookup;
|
||||
options.collation = collation;
|
||||
options.resumeTimestamp = resumeTimestamp;
|
||||
|
||||
return options;
|
||||
}
|
||||
|
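A minimal sketch of the ChangeStreamOptions builder usage shown in this diff; which of these methods is available depends on the side of the compare (resumeAt, startAfter and the Resume flag belong to the 2.2-style API). Token and timestamp values are placeholders.

import org.bson.BsonDocument;
import org.bson.BsonTimestamp;
import org.springframework.data.mongodb.core.ChangeStreamOptions;

class ChangeStreamOptionsSketch {

	ChangeStreamOptions resumeFromClusterTime(BsonTimestamp lastSeen) {
		// getResumeTimestamp()/getResumeBsonTimestamp() later convert between Instant and BsonTimestamp
		return ChangeStreamOptions.builder().resumeAt(lastSeen).build();
	}

	ChangeStreamOptions startAfterToken(BsonDocument resumeToken) {
		// startAfter(...) marks Resume.START_AFTER, resumeAfter(...) marks Resume.RESUME_AFTER,
		// a plain resumeToken(...) defaults to RESUME_AFTER
		return ChangeStreamOptions.builder().startAfter(resumeToken).build();
	}
}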
||||
@@ -1,227 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite
|
||||
* to {@code $geoWithin}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class CountQuery {
|
||||
|
||||
private Document source;
|
||||
|
||||
private CountQuery(Document source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public static CountQuery of(Document source) {
|
||||
return new CountQuery(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query
|
||||
* to be usable with {@code countDocuments()}.
|
||||
*
|
||||
* @return the query {@link Document} that can be used with {@code countDocuments()}.
|
||||
*/
|
||||
public Document toQueryDocument() {
|
||||
|
||||
if (!requiresRewrite(source)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
Document target = new Document();
|
||||
|
||||
for (Map.Entry<String, Object> entry : source.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Document theValue = (Document) entry.getValue();
|
||||
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Collection<?> source = (Collection<?>) entry.getValue();
|
||||
|
||||
target.put(entry.getKey(), rewriteCollection(source));
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("$and".equals(entry.getKey()) && target.containsKey("$and")) {
|
||||
// Expect $and to be processed with Document and createGeoWithin.
|
||||
continue;
|
||||
}
|
||||
|
||||
target.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param valueToInspect
|
||||
* @return {@code true} if the enclosing element needs to be rewritten.
|
||||
*/
|
||||
private boolean requiresRewrite(Object valueToInspect) {
|
||||
|
||||
if (valueToInspect instanceof Document) {
|
||||
return requiresRewrite((Document) valueToInspect);
|
||||
}
|
||||
|
||||
if (valueToInspect instanceof Collection) {
|
||||
return requiresRewrite((Collection) valueToInspect);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Collection<?> collection) {
|
||||
|
||||
for (Object o : collection) {
|
||||
if (o instanceof Document && requiresRewrite((Document) o)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Document document) {
|
||||
|
||||
if (containsNear(document)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (Object entry : document.values()) {
|
||||
|
||||
if (requiresRewrite(entry)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private Collection<Object> rewriteCollection(Collection<?> source) {
|
||||
|
||||
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
|
||||
|
||||
for (Object item : source) {
|
||||
if (item instanceof Document && requiresRewrite(item)) {
|
||||
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
|
||||
} else {
|
||||
rewrittenCollection.add(item);
|
||||
}
|
||||
}
|
||||
|
||||
return rewrittenCollection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rewrite the near query for field {@code key} to {@code $geoWithin}.
|
||||
*
|
||||
* @param key the queried field.
|
||||
* @param source source {@link Document}.
|
||||
* @param $and potentially existing {@code $and} condition.
|
||||
* @return the rewritten query {@link Document}.
|
||||
*/
|
||||
private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {
|
||||
|
||||
boolean spheric = source.containsKey("$nearSphere");
|
||||
Object $near = spheric ? source.get("$nearSphere") : source.get("$near");
|
||||
|
||||
Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
|
||||
List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
|
||||
Document $geoWithinMax = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMax));
|
||||
|
||||
if (!containsNearWithMinDistance(source)) {
|
||||
return new Document(key, $geoWithinMax);
|
||||
}
|
||||
|
||||
Number minDistance = (Number) source.get("$minDistance");
|
||||
List<Object> $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance);
|
||||
Document $geoWithinMin = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMin));
|
||||
|
||||
List<Document> criteria = new ArrayList<>();
|
||||
|
||||
if ($and != null) {
|
||||
if ($and instanceof Collection) {
|
||||
criteria.addAll((Collection) $and);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
|
||||
+ $and);
|
||||
}
|
||||
}
|
||||
|
||||
criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
|
||||
criteria.add(new Document(key, $geoWithinMax));
|
||||
return new Document("$and", criteria);
|
||||
}
|
||||
|
||||
private static boolean containsNear(Document source) {
|
||||
return source.containsKey("$near") || source.containsKey("$nearSphere");
|
||||
}
|
||||
|
||||
private static boolean containsNearWithMinDistance(Document source) {
|
||||
|
||||
if (!containsNear(source)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return source.containsKey("$minDistance");
|
||||
}
|
||||
|
||||
private static Object toCenterCoordinates(Object value) {
|
||||
|
||||
if (ObjectUtils.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Point) {
|
||||
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
|
||||
}
|
||||
|
||||
if (value instanceof Document && ((Document) value).containsKey("x")) {
|
||||
|
||||
Document point = (Document) value;
|
||||
return Arrays.asList(point.get("x"), point.get("y"));
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
|
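A stand-alone illustration of the rewrite CountQuery performs, using plain BSON Documents; the coordinates and distances are made up.

import java.util.Arrays;

import org.bson.Document;

class NearToGeoWithinIllustration {

	Document nearCriterion() {
		// $near with $maxDistance is not allowed inside countDocuments()
		return new Document("location",
				new Document("$near", Arrays.asList(8.54, 47.37)).append("$maxDistance", 500d));
	}

	Document rewrittenCriterion() {
		// equivalent $geoWithin/$center circle around the same point ($centerSphere for $nearSphere)
		return new Document("location", new Document("$geoWithin",
				new Document("$center", Arrays.asList(Arrays.asList(8.54, 47.37), 500d))));
	}
}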
||||
@@ -15,15 +15,9 @@
 */
package org.springframework.data.mongodb.core;

import java.util.function.Function;

import org.bson.Document;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.ReadPreference;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;

/**
 * Simple callback interface to allow customization of a {@link FindIterable}.
@@ -31,14 +25,7 @@ import com.mongodb.client.MongoCollection;
 * @author Oliver Gierke
 * @author Christoph Strobl
 */
public interface CursorPreparer extends ReadPreferenceAware {

	/**
	 * Default {@link CursorPreparer} just passing on the given {@link FindIterable}.
	 *
	 * @since 2.2
	 */
	CursorPreparer NO_OP_PREPARER = (iterable -> iterable);
public interface CursorPreparer {

	/**
	 * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
@@ -46,37 +33,4 @@ public interface CursorPreparer extends ReadPreferenceAware {
	 * @param cursor
	 */
	FindIterable<Document> prepare(FindIterable<Document> cursor);

	/**
	 * Apply query specific settings to {@link MongoCollection} and initate a find operation returning a
	 * {@link FindIterable} via the given {@link Function find} function.
	 *
	 * @param collection must not be {@literal null}.
	 * @param find must not be {@literal null}.
	 * @return
	 * @throws IllegalArgumentException if one of the required arguments is {@literal null}.
	 * @since 2.2
	 */
	default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
			Function<MongoCollection<Document>, FindIterable<Document>> find) {

		Assert.notNull(collection, "Collection must not be null!");
		Assert.notNull(find, "Find function must not be null!");

		if (hasReadPreference()) {
			collection = collection.withReadPreference(getReadPreference());
		}

		return prepare(find.apply(collection));
	}

	/**
	 * @return the {@link ReadPreference} to apply or {@literal null} if none defined.
	 * @since 2.2
	 */
	@Override
	@Nullable
	default ReadPreference getReadPreference() {
		return null;
	}
}

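A minimal sketch of a custom CursorPreparer, assuming the ReadPreferenceAware variant of the interface shown above. The batch size, read preference and query shape are placeholders.

import org.bson.Document;
import org.springframework.data.mongodb.core.CursorPreparer;

import com.mongodb.ReadPreference;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;

class SecondaryReadPreparer implements CursorPreparer {

	@Override
	public FindIterable<Document> prepare(FindIterable<Document> cursor) {
		// apply cursor-level settings before iteration
		return cursor.batchSize(256);
	}

	@Override
	public ReadPreference getReadPreference() {
		return ReadPreference.secondaryPreferred();
	}

	FindIterable<Document> findAll(MongoCollection<Document> collection) {
		// initiateFind(...) applies the read preference, then hands the collection to the find function
		return initiateFind(collection, col -> col.find(new Document()));
	}
}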
||||
@@ -26,32 +26,30 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.*;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOneModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
@@ -60,10 +58,7 @@ import com.mongodb.client.model.*;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @author Roman Puchkovskiy
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -71,7 +66,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
private final List<WriteModel<Document>> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
@@ -128,9 +123,16 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
addModel(source, new InsertOneModel<>(getMappedObject(source)));
|
||||
if (document instanceof Document) {
|
||||
|
||||
models.add(new InsertOneModel<>((Document) document));
|
||||
return this;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
mongoOperations.getConverter().write(document, sink);
|
||||
|
||||
models.add(new InsertOneModel<>(sink));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -244,7 +246,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -265,29 +267,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(replacement);
|
||||
addModel(source,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
@@ -297,47 +276,16 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
try {
|
||||
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private BulkWriteResult bulkWriteTo(MongoCollection<Document> collection) {
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
|
||||
});
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -354,12 +302,14 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
|
||||
if (multi) {
|
||||
addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
} else {
|
||||
addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -408,100 +358,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private void addModel(Object source, WriteModel<Document> model) {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (bulkOperationContext.getEventPublisher() == null) {
|
||||
return event;
|
||||
}
|
||||
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
return event;
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -516,29 +372,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||
* @param update The {@link Update} to apply
|
||||
* @param upsert flag to indicate if document should be upserted.
|
||||
* @return new instance of {@link UpdateOptions}.
|
||||
*/
|
||||
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||
list.add(arrayFilter.asDocument());
|
||||
}
|
||||
options.arrayFilters(list);
|
||||
}
|
||||
|
||||
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link BulkOperationContext} holds information about
|
||||
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
|
||||
@@ -554,20 +387,5 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
|
||||
|
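A sketch of the effect of the event/callback plumbing shown above: an entity callback registered in the ApplicationContext also runs for documents written through bulkOps. Person and the "lastTouched" field are hypothetical.

import java.time.Instant;

import org.springframework.context.annotation.Bean;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;

class BulkCallbackConfig {

	@Bean
	BeforeSaveCallback<Person> stampBeforeBulkWrite() {
		// document is the mapped Document backing the InsertOneModel/ReplaceOneModel
		return (entity, document, collection) -> {
			document.put("lastTouched", Instant.now());
			return entity;
		};
	}
}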
||||
@@ -21,7 +21,7 @@ import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
@@ -64,7 +64,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
this(mongoDbFactory, collectionName, queryMapper, null);
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
@Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
@@ -120,15 +120,19 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return execute(collection -> {
|
||||
|
||||
MongoPersistentEntity<?> entity = lookupPersistentEntity(type, collectionName);
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
Document mappedKeys = mapper.getMappedSort(indexDefinition.getIndexKeys(), entity);
|
||||
return collection.createIndex(mappedKeys, indexOptions);
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
|
||||
lookupPersistentEntity(type, collectionName)));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -188,7 +192,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
|
||||
|
||||
List<IndexInfo> indexInfoList = new ArrayList<>();
|
||||
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
|
||||
@@ -213,25 +217,4 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, callback);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
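A sketch of ensuring an index with a partial filter expression, which the code above maps through the QueryMapper before calling createIndex(...); the collection and field names are placeholders.

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.PartialIndexFilter;
import org.springframework.data.mongodb.core.query.Criteria;

class PartialIndexExample {

	void ensure(MongoTemplate template) {
		template.indexOps("orders").ensureIndex(new Index("status", Sort.Direction.ASC)
				.partial(PartialIndexFilter.of(Criteria.where("total").gt(100))));
	}
}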
||||
@@ -15,13 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
|
||||
/**
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}.
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -29,14 +29,14 @@ import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
*/
|
||||
class DefaultIndexOperationsProvider implements IndexOperationsProvider {
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final QueryMapper mapper;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) {
|
||||
DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) {
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.mapper = mapper;
|
||||
|
||||
@@ -94,16 +94,23 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
ops = ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity));
|
||||
}
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
|
||||
}).next();
|
||||
}
|
||||
@@ -119,24 +126,21 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
@@ -144,25 +148,4 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
.map(IndexConverters.documentToIndexInfoConverter()::convert);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,15 +42,13 @@ import com.mongodb.MongoException;
import com.mongodb.client.MongoDatabase;

/**
 * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}.
 * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
 *
 * @author Christoph Strobl
 * @author Oliver Gierke
 * @author Mark Paluch
 * @since 1.7
 * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0.
 */
@Deprecated
class DefaultScriptOperations implements ScriptOperations {

	private static final String SCRIPT_COLLECTION_NAME = "system.js";

||||
@@ -21,14 +21,12 @@ import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
@@ -36,15 +34,15 @@ import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
*
|
||||
@@ -153,57 +151,13 @@ class EntityOperations {
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return the name of the distanceField to use. {@literal dis} by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
public String nearQueryDistanceFieldName(Class<?> domainType) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(domainType);
|
||||
if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) {
|
||||
return "dis";
|
||||
}
|
||||
|
||||
String distanceFieldName = "calculated-distance";
|
||||
int counter = 0;
|
||||
while (persistentEntity.getPersistentProperty(distanceFieldName) != null) {
|
||||
distanceFieldName += "-" + (counter++);
|
||||
}
|
||||
|
||||
return distanceFieldName;
|
||||
}
|
||||
|
||||
private static Document parse(String source) {
|
||||
|
||||
try {
|
||||
return Document.parse(source);
|
||||
} catch (org.bson.json.JsonParseException o_O) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
} catch (RuntimeException o_O) {
|
||||
|
||||
// legacy 3.x exception
|
||||
if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
|
||||
} catch (JSONParseException | org.bson.json.JsonParseException o_O) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
}
|
||||
throw o_O;
|
||||
}
|
||||
}
|
||||
|
||||
public <T> TypedOperations<T> forType(@Nullable Class<T> entityClass) {
|
||||
|
||||
if (entityClass != null) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(entityClass);
|
||||
|
||||
if (entity != null) {
|
||||
return new TypedEntityOperations(entity);
|
||||
}
|
||||
|
||||
}
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -235,16 +189,6 @@ class EntityOperations {
|
||||
*/
|
||||
Query getByIdQuery();
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}.
|
||||
*
|
||||
* @return the {@link Query} to use for removing the entity. Never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default Query getRemoveByQuery() {
|
||||
return isVersionedEntity() ? getQueryForVersion() : getByIdQuery();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to find the entity in its current version.
|
||||
*
|
||||
@@ -275,11 +219,9 @@ class EntityOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}.
|
||||
* Returns the value of the version if the entity has a version property, {@literal null} otherwise.
|
||||
*
|
||||
* @return the entity version. Can be {@literal null}.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check
|
||||
* {@link #isVersionedEntity()}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
Object getVersion();
|
||||
@@ -335,8 +277,8 @@ class EntityOperations {
|
||||
/**
|
||||
* Returns the current version value if the entity has a version property.
|
||||
*
|
||||
* @return the current version or {@literal null} in case it's uninitialized.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property.
|
||||
* @return the current version or {@literal null} in case it's uninitialized or the entity doesn't expose a version
|
||||
* property.
|
||||
*/
|
||||
@Nullable
|
||||
Number getVersion();
|
||||
@@ -538,10 +480,10 @@ class EntityOperations {
|
||||
public Query getQueryForVersion() {
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
MongoPersistentProperty property = entity.getRequiredVersionProperty();
|
||||
|
||||
return new Query(Criteria.where(idProperty.getName()).is(getId())//
|
||||
.and(versionProperty.getName()).is(getVersion()));
|
||||
.and(property.getName()).is(getVersion()));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -725,102 +667,4 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-specific operations abstraction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @param <T>
|
||||
* @since 2.2
|
||||
*/
|
||||
interface TypedOperations<T> {
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} for the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation();
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for
|
||||
* the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} backed by {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query.getCollation().isPresent()) {
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
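A sketch of the query shape produced by getQueryForVersion()/getRemoveByQuery() above for a versioned entity; the literal "id" and "version" property names stand in for the mapped id and version fields.

import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class VersionedRemoveQuerySketch {

	Query removeByIdAndVersion(Object id, Number version) {
		// matches only while the stored version still equals the in-memory one
		return new Query(Criteria.where("id").is(id).and("version").is(version));
	}
}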
||||
@@ -21,7 +21,6 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -44,7 +43,7 @@ import com.mongodb.client.MongoCollection;
|
||||
* query(Human.class)
|
||||
* .inCollection("star-wars")
|
||||
* .as(Jedi.class)
|
||||
* .matching(where("firstname").is("luke"))
|
||||
* .matching(query(where("firstname").is("luke")))
|
||||
* .all();
|
||||
* </code>
|
||||
* </pre>
|
||||
@@ -171,18 +170,6 @@ public interface ExecutableFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
@@ -304,21 +291,9 @@ public interface ExecutableFindOperation {
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
TerminatingDistinct<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingDistinct}.
|
||||
* @throws IllegalArgumentException if criteria is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,7 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -268,11 +267,6 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
this.limit = Optional.of(limit);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReadPreference getReadPreference() {
|
||||
return delegate.getReadPreference();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -19,7 +19,6 @@ import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -45,7 +44,6 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ExecutableMapReduceOperation {
|
||||
@@ -148,18 +146,6 @@ public interface ExecutableMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
@@ -120,18 +119,6 @@ public interface ExecutableRemoveOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemove<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemove}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingRemove<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,11 +17,8 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
@@ -154,16 +151,13 @@ public interface ExecutableUpdateOperation {
|
||||
interface UpdateWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Set the {@link UpdateDefinition} to be applied.
|
||||
* Set the {@link Update} to be applied.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingUpdate}.
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
TerminatingUpdate<T> apply(UpdateDefinition update);
|
||||
TerminatingUpdate<T> apply(Update update);
|
||||
|
||||
/**
|
||||
* Specify {@code replacement} object.
|
||||
@@ -211,18 +205,6 @@ public interface ExecutableUpdateOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateWithUpdate<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateWithUpdate}.
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default UpdateWithUpdate<T> matching(CriteriaDefinition criteria) {
|
||||
return matching(Query.query(criteria));
|
||||
}
|
||||
}
|
||||
|
||||
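A usage sketch of the fluent update API shown above (template and Person are illustrative assumptions; an Update satisfies both apply variants in this diff, since Update implements UpdateDefinition):

import static org.springframework.data.mongodb.core.query.Criteria.where;

UpdateResult result = template.update(Person.class)
        .matching(where("lastname").is("skywalker"))  // CriteriaDefinition shortcut
        .apply(new Update().set("title", "Jedi"))     // Update (or AggregationUpdate) as the update definition
        .first();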
/**
|
||||
|
||||
@@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -67,7 +67,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
Query query;
|
||||
@Nullable UpdateDefinition update;
|
||||
@Nullable Update update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@@ -76,10 +76,10 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingUpdate<T> apply(UpdateDefinition update) {
|
||||
public TerminatingUpdate<T> apply(Update update) {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
|
||||
@@ -33,31 +33,6 @@ public class FindAndModifyOptions {
|
||||
|
||||
private @Nullable Collation collation;
|
||||
|
||||
private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions returnNew(boolean returnNew) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions upsert(boolean upsert) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions remove(boolean remove) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions collation(@Nullable Collation collation) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
@@ -67,16 +42,6 @@ public class FindAndModifyOptions {
|
||||
return new FindAndModifyOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndModifyOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndModifyOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link FindAndModifyOptions} based on the options of the given {@literal source}.
|
||||
*
|
||||
|
||||
@@ -36,21 +36,6 @@ public class FindAndReplaceOptions {
|
||||
private boolean returnNew;
|
||||
private boolean upsert;
|
||||
|
||||
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions upsert() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance.
|
||||
* <dl>
|
||||
@@ -66,16 +51,6 @@ public class FindAndReplaceOptions {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndReplaceOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance with
|
||||
* <dl>
|
||||
|
||||
@@ -15,69 +15,20 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.reactivestreams.client.FindPublisher;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Simple callback interface to allow customization of a {@link FindPublisher}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Konstantin Volivach
|
||||
*/
|
||||
public interface FindPublisherPreparer extends ReadPreferenceAware {
|
||||
|
||||
/**
|
||||
* Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}.
|
||||
*
|
||||
* @since 2.2
|
||||
*/
|
||||
FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher);
|
||||
public interface FindPublisherPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
*
|
||||
* @param findPublisher must not be {@literal null}.
|
||||
*/
|
||||
FindPublisher<Document> prepare(FindPublisher<Document> findPublisher);
|
||||
|
||||
/**
|
||||
* Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a
|
||||
* {@link FindPublisher} via the given {@link Function find} function.
|
||||
*
|
||||
* @param collection must not be {@literal null}.
|
||||
* @param find must not be {@literal null}.
|
||||
* @return the prepared {@link FindPublisher}.
|
||||
* @throws IllegalArgumentException if one of the required arguments is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default FindPublisher<Document> initiateFind(MongoCollection<Document> collection,
|
||||
Function<MongoCollection<Document>, FindPublisher<Document>> find) {
|
||||
|
||||
Assert.notNull(collection, "Collection must not be null!");
|
||||
Assert.notNull(find, "Find function must not be null!");
|
||||
|
||||
if (hasReadPreference()) {
|
||||
collection = collection.withReadPreference(getReadPreference());
|
||||
}
|
||||
|
||||
return prepare(find.apply(collection));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link ReadPreference} to apply or {@literal null} if none defined.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
default ReadPreference getReadPreference() {
|
||||
return null;
|
||||
}
|
||||
<T> FindPublisher<T> prepare(FindPublisher<T> findPublisher);
|
||||
}
|
||||
|
||||
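For context, a FindPublisherPreparer is typically supplied as a lambda; a hedged sketch (illustrative only, not part of this diff):

// Applies a limit and batch size to the publisher before execution.
FindPublisherPreparer limitToTen = publisher -> publisher.limit(10).batchSize(10);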
@@ -80,11 +80,7 @@ public class MappedDocument {
|
||||
}
|
||||
|
||||
public Bson getIdFilter() {
|
||||
return new Document(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public Object get(String key) {
|
||||
return document.get(key);
|
||||
return Filters.eq(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public UpdateDefinition updateWithoutId() {
|
||||
@@ -96,7 +92,7 @@ public class MappedDocument {
|
||||
* mapped to the specific domain type.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @since 2.1.4
|
||||
*/
|
||||
class MappedUpdate implements UpdateDefinition {
|
||||
|
||||
@@ -141,14 +137,5 @@ public class MappedDocument {
|
||||
public Boolean isIsolated() {
|
||||
return delegate.isIsolated();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
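Both getIdFilter variants shown in the hunk above produce an equality filter on the _id field; a hedged equivalence sketch (the source document and id value are illustrative):

import org.bson.Document;
import org.bson.conversions.Bson;
import com.mongodb.client.model.Filters;

Object id = source.get("_id");                  // 'source' is an assumed org.bson.Document
Bson viaDocument = new Document("_id", id);     // raw document variant: { "_id" : <id> }
Bson viaFilters = Filters.eq("_id", id);        // driver filter builder, renders to the same query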
@@ -1,210 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
* domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names}
|
||||
* and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public MongoJsonSchema createSchemaFor(Class<?> type) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty nested : entity) {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
break;
|
||||
}
|
||||
|
||||
currentPath.add(nested);
|
||||
schemaProperties.add(computeSchemaForProperty(currentPath));
|
||||
}
|
||||
|
||||
return schemaProperties;
|
||||
}
|
||||
|
||||
private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {
|
||||
|
||||
MongoPersistentProperty property = CollectionUtils.lastElement(path);
|
||||
|
||||
boolean required = isRequiredProperty(property);
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentProperty property, boolean required) {
|
||||
|
||||
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
|
||||
mappingContext.getRequiredPersistentEntity(property));
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) {
|
||||
return createSchemaProperty(fieldName, type, required, Collections.emptyList());
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
|
||||
return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
|
||||
: property.getName();
|
||||
}
|
||||
|
||||
private boolean isRequiredProperty(PersistentProperty property) {
|
||||
return property.getType().isPrimitive();
|
||||
}
|
||||
|
||||
private Class<?> computeTargetType(PersistentProperty<?> property) {
|
||||
|
||||
if (!(property instanceof MongoPersistentProperty)) {
|
||||
return property.getType();
|
||||
}
|
||||
|
||||
MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
|
||||
if (!mongoProperty.isIdProperty()) {
|
||||
return mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
if (mongoProperty.hasExplicitWriteTarget()) {
|
||||
return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass();
|
||||
}
|
||||
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
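For context, the MappingMongoJsonSchemaCreator above backs the MongoJsonSchemaCreator entry point; a hedged usage sketch (converter and Person are illustrative assumptions):

MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(converter);
MongoJsonSchema schema = schemaCreator.createSchemaFor(Person.class);
Document schemaDocument = schema.toDocument();  // e.g. usable for $jsonSchema collection validation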
@@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedOperation;
|
||||
import org.springframework.jmx.export.annotation.ManagedResource;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -36,14 +36,10 @@ public class MongoAdmin implements MongoAdminOperations {
|
||||
|
||||
private final MongoClient mongoClient;
|
||||
|
||||
/**
|
||||
* @param client the underlying {@link com.mongodb.client.MongoClient} used for data access.
|
||||
* @since 2.2
|
||||
*/
|
||||
public MongoAdmin(MongoClient client) {
|
||||
public MongoAdmin(MongoClient mongoClient) {
|
||||
|
||||
Assert.notNull(client, "Client must not be null!");
|
||||
this.mongoClient = client;
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
this.mongoClient = mongoClient;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
||||
@@ -16,72 +16,66 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.event.ClusterListener;
|
||||
|
||||
/**
|
||||
* Convenient factory for configuring MongoDB.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
*/
|
||||
public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();
|
||||
|
||||
private @Nullable MongoClientSettings mongoClientSettings;
|
||||
private @Nullable MongoClientOptions mongoClientOptions;
|
||||
private @Nullable String host;
|
||||
private @Nullable Integer port;
|
||||
private @Nullable List<MongoCredential> credential = null;
|
||||
private @Nullable ConnectionString connectionString;
|
||||
private @Nullable String replicaSet = null;
|
||||
private List<ServerAddress> replicaSetSeeds = Collections.emptyList();
|
||||
private List<MongoCredential> credentials = Collections.emptyList();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}.
|
||||
* Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClientOptions
|
||||
*/
|
||||
public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) {
|
||||
this.mongoClientSettings = mongoClientOptions;
|
||||
public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) {
|
||||
this.mongoClientOptions = mongoClientOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of credentials to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param credential can be {@literal null}.
|
||||
* @param credentials can be {@literal null}.
|
||||
*/
|
||||
public void setCredential(@Nullable MongoCredential[] credential) {
|
||||
this.credential = Arrays.asList(credential);
|
||||
public void setCredentials(@Nullable MongoCredential[] credentials) {
|
||||
this.credentials = filterNonNullElementsAsList(credentials);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of {@link ServerAddress} to build up a replica set for.
|
||||
*
|
||||
* @param replicaSetSeeds can be {@literal null}.
|
||||
*/
|
||||
public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) {
|
||||
this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -102,14 +96,6 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
public void setConnectionString(@Nullable ConnectionString connectionString) {
|
||||
this.connectionString = connectionString;
|
||||
}
|
||||
|
||||
public void setReplicaSet(@Nullable String replicaSet) {
|
||||
this.replicaSet = replicaSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to use.
|
||||
*
|
||||
@@ -142,198 +128,12 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
*/
|
||||
@Override
|
||||
protected MongoClient createInstance() throws Exception {
|
||||
return createMongoClient(computeClientSetting());
|
||||
|
||||
if (mongoClientOptions == null) {
|
||||
mongoClientOptions = MongoClientOptions.builder().build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link MongoClientSettings} based on configuration and priority (lower is better).
|
||||
* <ol>
|
||||
* <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
|
||||
* <li>{@link MongoClientFactoryBean#connectionString}</li>
|
||||
* <li>default {@link MongoClientSettings}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings computeClientSetting() {
|
||||
|
||||
if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
|
||||
throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
|
||||
}
|
||||
|
||||
ConnectionString connectionString = this.connectionString != null ? this.connectionString
|
||||
: new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()),
|
||||
getOrDefault(port, "" + ServerAddress.defaultPort())));
|
||||
|
||||
Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString);
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
|
||||
if (mongoClientSettings != null) {
|
||||
|
||||
MongoClientSettings defaultSettings = MongoClientSettings.builder().build();
|
||||
|
||||
SslSettings sslSettings = mongoClientSettings.getSslSettings();
|
||||
ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings();
|
||||
ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings();
|
||||
SocketSettings socketSettings = mongoClientSettings.getSocketSettings();
|
||||
ServerSettings serverSettings = mongoClientSettings.getServerSettings();
|
||||
|
||||
builder = builder //
|
||||
.applicationName(computeSettingsValue(defaultSettings.getApplicationName(),
|
||||
mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) //
|
||||
.applyToSslSettings(settings -> {
|
||||
|
||||
applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled()));
|
||||
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed())));
|
||||
settings.context(sslSettings.getContext());
|
||||
}).applyToClusterSettings(settings -> {
|
||||
|
||||
applySettings(settings::hosts,
|
||||
computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList())));
|
||||
|
||||
applySettings(settings::requiredReplicaSetName,
|
||||
computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(),
|
||||
clusterSettings, connectionString.getRequiredReplicaSetName()));
|
||||
|
||||
applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold()));
|
||||
|
||||
applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getServerSelectionTimeout()));
|
||||
|
||||
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
List<ClusterListener> clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null);
|
||||
if (clusterListeners != null) {
|
||||
clusterListeners.forEach(settings::addClusterListener);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionIdleTime()));
|
||||
|
||||
applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionLifeTime()));
|
||||
|
||||
applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxWaitTime()));
|
||||
|
||||
applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue(
|
||||
(ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(settings::minSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMinConnectionPoolSize()));
|
||||
applySettings(settings::maxSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMaxConnectionPoolSize()));
|
||||
}) //
|
||||
.applyToSocketSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout()));
|
||||
|
||||
applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout()));
|
||||
applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
}) //
|
||||
.applyToServerSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, null));
|
||||
|
||||
applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency()));
|
||||
settings.applySettings(serverSettings);
|
||||
}) //
|
||||
.autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) //
|
||||
.codecRegistry(mongoClientSettings.getCodecRegistry()); //
|
||||
|
||||
applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(),
|
||||
mongoClientSettings.getReadConcern(), connectionString.getReadConcern()));
|
||||
applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(),
|
||||
mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern()));
|
||||
applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(),
|
||||
mongoClientSettings.getReadPreference(), connectionString.getReadPreference()));
|
||||
applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(),
|
||||
mongoClientSettings.getRetryReads(), connectionString.getRetryReads()));
|
||||
applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(),
|
||||
mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue()));
|
||||
applySettings(builder::uuidRepresentation,
|
||||
computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY));
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(credential)) {
|
||||
builder = builder.credential(credential.iterator().next());
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(replicaSet)) {
|
||||
builder.applyToClusterSettings((settings) -> {
|
||||
settings.requiredReplicaSetName(replicaSet);
|
||||
});
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private <T> void applySettings(Consumer<T> settingsBuilder, @Nullable T value) {
|
||||
|
||||
if (ObjectUtils.isEmpty(value)) {
|
||||
return;
|
||||
}
|
||||
settingsBuilder.accept(value);
|
||||
}
|
||||
|
||||
private <S, T> T computeSettingsValue(Function<S, T> function, S defaultValueHolder, S settingsValueHolder,
|
||||
@Nullable T connectionStringValue) {
|
||||
return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder),
|
||||
connectionStringValue);
|
||||
}
|
||||
|
||||
private <T> T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) {
|
||||
|
||||
boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings);
|
||||
boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString);
|
||||
|
||||
if (!fromSettingsIsDefault) {
|
||||
return fromSettings;
|
||||
}
|
||||
return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
|
||||
return createMongoClient();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -348,11 +148,43 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
}
|
||||
}
|
||||
|
||||
private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
private MongoClient createMongoClient() throws UnknownHostException {
|
||||
|
||||
if (!CollectionUtils.isEmpty(replicaSetSeeds)) {
|
||||
return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
private String getOrDefault(Object value, String defaultValue) {
|
||||
return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
|
||||
return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException {
|
||||
|
||||
ServerAddress defaultAddress = new ServerAddress();
|
||||
|
||||
return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(),
|
||||
port != null ? port.intValue() : defaultAddress.getPort());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given array as {@link List} with all {@literal null} elements removed.
|
||||
*
|
||||
* @param elements the elements to filter, can be {@literal null}.
* @return a new unmodifiable {@link List} of the given elements without {@literal null}s.
|
||||
*/
|
||||
private static <T> List<T> filterNonNullElementsAsList(@Nullable T[] elements) {
|
||||
|
||||
if (elements == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<T> candidateElements = new ArrayList<T>();
|
||||
|
||||
for (T element : elements) {
|
||||
if (element != null) {
|
||||
candidateElements.add(element);
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.unmodifiableList(candidateElements);
|
||||
}
|
||||
}
|
||||
|
||||
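The precedence implemented by computeSettingsValue(...) above can be restated as: a value explicitly configured on MongoClientSettings wins, otherwise the ConnectionString value, otherwise the driver default. A hedged restatement of that rule (not part of the diff itself):

static <T> T pick(T defaultValue, T fromSettings, T fromConnectionString) {

	if (!java.util.Objects.equals(defaultValue, fromSettings)) {
		return fromSettings;                    // settings differ from the default, so they were set explicitly
	}
	return !java.util.Objects.equals(defaultValue, fromConnectionString)
			? fromConnectionString              // otherwise fall back to the connection string value
			: defaultValue;                     // otherwise keep the driver default
}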
@@ -0,0 +1,314 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientOptions} instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
*/
|
||||
public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClientOptions> {
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
.getThreadsAllowedToBlockForConnectionMultiplier();
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime();
|
||||
private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime();
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory();
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout();
|
||||
private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout();
|
||||
private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName();
|
||||
private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout();
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of connections per host.
|
||||
*
|
||||
* @param minConnectionsPerHost
|
||||
*/
|
||||
public void setMinConnectionsPerHost(int minConnectionsPerHost) {
|
||||
this.minConnectionsPerHost = minConnectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of connections allowed per host. Callers will block if the pool is exhausted. Default is 10. The
* system property {@code MONGO.POOLSIZE} can override this value.
|
||||
*
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the multiplier of connectionsPerHost that limits how many threads may block waiting for a connection. Default
* is 5. If connectionsPerHost is 10 and threadsAllowedToBlockForConnectionMultiplier is 5, then up to 50 threads can
* block; more than that and an exception will be thrown.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum wait time of a blocking thread for a connection. Default is 120,000 ms (2 minutes).
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum idle time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionIdleTime
|
||||
*/
|
||||
public void setMaxConnectionIdleTime(int maxConnectionIdleTime) {
|
||||
this.maxConnectionIdleTime = maxConnectionIdleTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum life time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionLifeTime
|
||||
*/
|
||||
public void setMaxConnectionLifeTime(int maxConnectionLifeTime) {
|
||||
this.maxConnectionLifeTime = maxConnectionLifeTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout in milliseconds. 0 is the default and means infinite.
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout. 0 is the default and means infinite.
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the keep-alive flag that controls whether socket keep-alive is enabled. Defaults to {@literal false}.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
public void setSocketKeepAlive(boolean socketKeepAlive) {
|
||||
this.socketKeepAlive = socketKeepAlive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(@Nullable ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB
|
||||
* object.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(@Nullable WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketFactory
|
||||
*/
|
||||
public void setSocketFactory(@Nullable SocketFactory socketFactory) {
|
||||
this.socketFactory = socketFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the frequency at which the driver will attempt to determine the current state of each server in the cluster.
|
||||
*
|
||||
* @param heartbeatFrequency
|
||||
*/
|
||||
public void setHeartbeatFrequency(int heartbeatFrequency) {
|
||||
this.heartbeatFrequency = heartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* In the event that the driver has to frequently re-check a server's availability, it will wait at least this long
|
||||
* since the previous check to avoid wasted effort.
|
||||
*
|
||||
* @param minHeartbeatFrequency
|
||||
*/
|
||||
public void setMinHeartbeatFrequency(int minHeartbeatFrequency) {
|
||||
this.minHeartbeatFrequency = minHeartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatConnectTimeout
|
||||
*/
|
||||
public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) {
|
||||
this.heartbeatConnectTimeout = heartbeatConnectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatSocketTimeout
|
||||
*/
|
||||
public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) {
|
||||
this.heartbeatSocketTimeout = heartbeatSocketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the name of the replica set.
|
||||
*
|
||||
* @param requiredReplicaSetName
|
||||
*/
|
||||
public void setRequiredReplicaSetName(String requiredReplicaSetName) {
|
||||
this.requiredReplicaSetName = requiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls whether the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here,
|
||||
* {@link SSLSocketFactory#getDefault()} will be used.
|
||||
*
|
||||
* @param sslSocketFactory
|
||||
*/
|
||||
public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
this.ssl = sslSocketFactory != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set, the default value of
* 30 sec will be used. A value of 0 means that it will time out immediately if no server is available. A negative
* value means to wait indefinitely.
|
||||
*
|
||||
* @param serverSelectionTimeout in msec.
|
||||
*/
|
||||
public void setServerSelectionTimeout(int serverSelectionTimeout) {
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
*/
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@Override
|
||||
protected MongoClientOptions createInstance() throws Exception {
|
||||
|
||||
SocketFactory socketFactoryToUse = ssl
|
||||
? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault())
|
||||
: this.socketFactory;
|
||||
|
||||
return MongoClientOptions.builder() //
|
||||
.alwaysUseMBeans(this.alwaysUseMBeans) //
|
||||
.connectionsPerHost(this.connectionsPerHost) //
|
||||
.connectTimeout(connectTimeout) //
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.description(description) //
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
.maxConnectionIdleTime(maxConnectionIdleTime) //
|
||||
.maxConnectionLifeTime(maxConnectionLifeTime) //
|
||||
.maxWaitTime(maxWaitTime) //
|
||||
.minConnectionsPerHost(minConnectionsPerHost) //
|
||||
.minHeartbeatFrequency(minHeartbeatFrequency) //
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.socketFactory(socketFactoryToUse) //
|
||||
.socketKeepAlive(socketKeepAlive) //
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientOptions.class;
|
||||
}
|
||||
}
|
||||
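A hedged usage sketch of the options factory bean above, e.g. from programmatic configuration (timeout values are illustrative):

MongoClientOptionsFactoryBean factoryBean = new MongoClientOptionsFactoryBean();
factoryBean.setConnectTimeout(2000);
factoryBean.setSocketTimeout(5000);
factoryBean.setSsl(true);
factoryBean.afterPropertiesSet();                      // AbstractFactoryBean lifecycle callback
MongoClientOptions options = factoryBean.getObject();  // then e.g. new MongoClient(serverAddress, options)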
@@ -1,486 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private @Nullable Boolean retryReads = null;
|
||||
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private @Nullable Boolean retryWrites = null;
|
||||
|
||||
private @Nullable String applicationName = null;
|
||||
|
||||
	private @Nullable UuidRepresentation uUidRepresentation = null;

	// --> Socket Settings

	private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings()
			.getConnectTimeout(TimeUnit.MILLISECONDS);
	private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS);
	private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize();
	private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize();

	// --> Cluster Settings

	private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost();
	private List<ServerAddress> clusterHosts = Collections.emptyList();
	private @Nullable ClusterConnectionMode clusterConnectionMode = null;
	private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType();
	private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings()
			.getRequiredReplicaSetName();
	private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
			.getLocalThreshold(TimeUnit.MILLISECONDS);
	private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
			.getServerSelectionTimeout(TimeUnit.MILLISECONDS);

	// --> ConnectionPoolSettings

	private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize();
	private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize();
	private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
			.getMaxWaitTime(TimeUnit.MILLISECONDS);
	private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
			.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS);
	private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
			.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS);
	private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
			.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS);
	private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
			.getMaintenanceFrequency(TimeUnit.MILLISECONDS);

	// --> SSL Settings

	private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled();
	private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed();
	private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled()
			? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName()
			: "";

	// encryption and retry

	private @Nullable AutoEncryptionSettings autoEncryptionSettings;

	/**
	 * @param socketConnectTimeoutMS in msec
	 * @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit)
	 */
	public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) {
		this.socketConnectTimeoutMS = socketConnectTimeoutMS;
	}

	/**
	 * @param socketReadTimeoutMS in msec
	 * @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit)
	 */
	public void setSocketReadTimeoutMS(int socketReadTimeoutMS) {
		this.socketReadTimeoutMS = socketReadTimeoutMS;
	}

	/**
	 * @param socketReceiveBufferSize
	 * @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int)
	 */
	public void setSocketReceiveBufferSize(int socketReceiveBufferSize) {
		this.socketReceiveBufferSize = socketReceiveBufferSize;
	}

	/**
	 * @param socketSendBufferSize
	 * @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int)
	 */
	public void setSocketSendBufferSize(int socketSendBufferSize) {
		this.socketSendBufferSize = socketSendBufferSize;
	}

	// --> Server Settings

	private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
			.getHeartbeatFrequency(TimeUnit.MILLISECONDS);
	private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
			.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS);

	/**
	 * @param serverHeartbeatFrequencyMS in msec
	 * @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit)
	 */
	public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) {
		this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS;
	}

	/**
	 * @param serverMinHeartbeatFrequencyMS in msec
	 * @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit)
	 */
	public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) {
		this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS;
	}

	// --> Cluster Settings

	/**
	 * @param clusterSrvHost
	 * @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String)
	 */
	public void setClusterSrvHost(String clusterSrvHost) {
		this.clusterSrvHost = clusterSrvHost;
	}

	/**
	 * @param clusterHosts
	 * @see com.mongodb.connection.ClusterSettings.Builder#hosts(List)
	 */
	public void setClusterHosts(ServerAddress[] clusterHosts) {
		this.clusterHosts = Arrays.asList(clusterHosts);
	}

	/**
	 * @param clusterConnectionMode
	 * @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode)
	 */
	public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) {
		this.clusterConnectionMode = clusterConnectionMode;
	}

	/**
	 * @param custerRequiredClusterType
	 * @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType)
	 */
	public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) {
		this.custerRequiredClusterType = custerRequiredClusterType;
	}

	/**
	 * @param clusterRequiredReplicaSetName
	 * @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String)
	 */
	public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) {
		this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName;
	}

	/**
	 * @param clusterLocalThresholdMS in msec
	 * @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit)
	 */
	public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) {
		this.clusterLocalThresholdMS = clusterLocalThresholdMS;
	}

	/**
	 * @param clusterServerSelectionTimeoutMS in msec
	 * @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit)
	 */
	public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) {
		this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS;
	}

	// --> ConnectionPoolSettings

	/**
	 * @param poolMaxSize
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int)
	 */
	public void setPoolMaxSize(int poolMaxSize) {
		this.poolMaxSize = poolMaxSize;
	}

	/**
	 * @param poolMinSize
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int)
	 */
	public void setPoolMinSize(int poolMinSize) {
		this.poolMinSize = poolMinSize;
	}

	/**
	 * @param poolMaxWaitTimeMS in msec
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit)
	 */
	public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) {
		this.poolMaxWaitTimeMS = poolMaxWaitTimeMS;
	}

	/**
	 * @param poolMaxConnectionLifeTimeMS in msec
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit)
	 */
	public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) {
		this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS;
	}

	/**
	 * @param poolMaxConnectionIdleTimeMS in msec
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit)
	 */
	public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) {
		this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS;
	}

	/**
	 * @param poolMaintenanceInitialDelayMS in msec
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit)
	 */
	public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) {
		this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS;
	}

	/**
	 * @param poolMaintenanceFrequencyMS in msec
	 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit)
	 */
	public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) {
		this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS;
	}

	// --> SSL Settings

	/**
	 * @param sslEnabled
	 * @see com.mongodb.connection.SslSettings.Builder#enabled(boolean)
	 */
	public void setSslEnabled(Boolean sslEnabled) {
		this.sslEnabled = sslEnabled;
	}

	/**
	 * @param sslInvalidHostNameAllowed
	 * @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean)
	 */
	public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) {
		this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed;
	}

	/**
	 * @param sslProvider
	 * @see com.mongodb.connection.SslSettings.Builder#context(SSLContext)
	 * @see SSLContext#getInstance(String)
	 */
	public void setSslProvider(String sslProvider) {
		this.sslProvider = sslProvider;
	}

	// encryption and retry

	/**
	 * @param applicationName
	 * @see MongoClientSettings.Builder#applicationName(String)
	 */
	public void setApplicationName(@Nullable String applicationName) {
		this.applicationName = applicationName;
	}

	/**
	 * @param retryReads
	 * @see MongoClientSettings.Builder#retryReads(boolean)
	 */
	public void setRetryReads(@Nullable Boolean retryReads) {
		this.retryReads = retryReads;
	}

	/**
	 * @param readConcern
	 * @see MongoClientSettings.Builder#readConcern(ReadConcern)
	 */
	public void setReadConcern(ReadConcern readConcern) {
		this.readConcern = readConcern;
	}

	/**
	 * @param writeConcern
	 * @see MongoClientSettings.Builder#writeConcern(WriteConcern)
	 */
	public void setWriteConcern(WriteConcern writeConcern) {
		this.writeConcern = writeConcern;
	}

	/**
	 * @param retryWrites
	 * @see MongoClientSettings.Builder#retryWrites(boolean)
	 */
	public void setRetryWrites(@Nullable Boolean retryWrites) {
		this.retryWrites = retryWrites;
	}

	/**
	 * @param readPreference
	 * @see MongoClientSettings.Builder#readPreference(ReadPreference)
	 */
	public void setReadPreference(ReadPreference readPreference) {
		this.readPreference = readPreference;
	}

	/**
	 * @param streamFactoryFactory
	 * @see MongoClientSettings.Builder#streamFactoryFactory(StreamFactoryFactory)
	 */
	public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
		this.streamFactoryFactory = streamFactoryFactory;
	}

	/**
	 * @param codecRegistry
	 * @see MongoClientSettings.Builder#codecRegistry(CodecRegistry)
	 */
	public void setCodecRegistry(CodecRegistry codecRegistry) {
		this.codecRegistry = codecRegistry;
	}

	/**
	 * @param uUidRepresentation
	 */
	public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) {
		this.uUidRepresentation = uUidRepresentation;
	}

	/**
	 * @param autoEncryptionSettings can be {@literal null}.
	 * @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings)
	 */
	public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
		this.autoEncryptionSettings = autoEncryptionSettings;
	}

	@Override
	public Class<?> getObjectType() {
		return MongoClientSettings.class;
	}

	@Override
	protected MongoClientSettings createInstance() {

		Builder builder = MongoClientSettings.builder() //
				.readPreference(readPreference) //
				.writeConcern(writeConcern) //
				.readConcern(readConcern) //
				.codecRegistry(codecRegistry) //
				.applicationName(applicationName) //
				.autoEncryptionSettings(autoEncryptionSettings) //
				.applyToClusterSettings((settings) -> {

					settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS);
					if (clusterConnectionMode != null) {
						settings.mode(clusterConnectionMode);
					}
					settings.requiredReplicaSetName(clusterRequiredReplicaSetName);

					if (!CollectionUtils.isEmpty(clusterHosts)) {
						settings.hosts(clusterHosts);
					}
					settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS);
					// settings.maxWaitQueueSize(clusterMaxWaitQueueSize);
					settings.requiredClusterType(custerRequiredClusterType);

					if (StringUtils.hasText(clusterSrvHost)) {
						settings.srvHost(clusterSrvHost);
					}
				}) //
				.applyToConnectionPoolSettings((settings) -> {

					settings.minSize(poolMinSize);
					settings.maxSize(poolMaxSize);
					settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS);
					settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS);
					settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS);
					// settings.maxWaitQueueSize(poolMaxWaitQueueSize);
					settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS);
					settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS);
				}) //
				.applyToServerSettings((settings) -> {

					settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
					settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
				}) //
				.applyToSocketSettings((settings) -> {

					settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS);
					settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS);
					settings.receiveBufferSize(socketReceiveBufferSize);
					settings.sendBufferSize(socketSendBufferSize);
				}) //
				.applyToSslSettings((settings) -> {

					settings.enabled(sslEnabled);
					if (sslEnabled) {

						settings.invalidHostNameAllowed(sslInvalidHostNameAllowed);
						try {
							settings.context(
									StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault());
						} catch (NoSuchAlgorithmException e) {
							throw new IllegalArgumentException(e.getMessage(), e);
						}
					}
				});

		if (streamFactoryFactory != null) {
			builder = builder.streamFactoryFactory(streamFactoryFactory);
		}
		if (retryReads != null) {
			builder = builder.retryReads(retryReads);
		}
		if (retryWrites != null) {
			builder = builder.retryWrites(retryWrites);
		}

		if (uUidRepresentation != null) {
			builder.uuidRepresentation(uUidRepresentation);
		}

		return builder.build();
	}
}
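The factory bean above only collects plain properties; the driver-facing object is assembled once in createInstance(). A minimal usage sketch, assuming the class keeps extending Spring's AbstractFactoryBean (which the overridden createInstance()/getObjectType() methods suggest); the property values are illustrative and the calls sit in a context that declares throws Exception:

	MongoClientSettingsFactoryBean settingsFactory = new MongoClientSettingsFactoryBean();
	settingsFactory.setApplicationName("sample-app"); // illustrative value
	settingsFactory.setClusterHosts(new ServerAddress[] { new ServerAddress("localhost", 27017) });
	settingsFactory.setSocketConnectTimeoutMS(2000);
	settingsFactory.setRetryWrites(true);

	settingsFactory.afterPropertiesSet(); // triggers createInstance() outside a container
	MongoClientSettings settings = settingsFactory.getObject(); // AbstractFactoryBean contract
	MongoClient client = MongoClients.create(settings);

Inside a Spring container the afterPropertiesSet()/getObject() calls are made by the framework; they are spelled out here only to show when createInstance() runs.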

@@ -16,9 +16,10 @@
package org.springframework.data.mongodb.core;

import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.WriteConcernResult;
import com.mongodb.WriteResult;

/**
 * Mongo-specific {@link DataIntegrityViolationException}.
@@ -29,17 +30,17 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation

	private static final long serialVersionUID = -186980521176764046L;

	private final WriteConcernResult writeResult;
	private final WriteResult writeResult;
	private final MongoActionOperation actionOperation;

	/**
	 * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}.
	 * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}.
	 *
	 * @param message the exception message
	 * @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}.
	 * @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}.
	 * @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}.
	 */
	public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult,
	public MongoDataIntegrityViolationException(String message, WriteResult writeResult,
			MongoActionOperation actionOperation) {

		super(message);
@@ -52,11 +53,11 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
	}

	/**
	 * Returns the {@link WriteConcernResult} that caused the exception.
	 * Returns the {@link WriteResult} that caused the exception.
	 *
	 * @return the writeResult
	 */
	public WriteConcernResult getWriteResult() {
	public WriteResult getWriteResult() {
		return writeResult;
	}

@@ -1,260 +0,0 @@
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import lombok.Value;

import org.springframework.aop.framework.ProxyFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.ClientSessionOptions;
import com.mongodb.WriteConcern;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;

/**
 * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
 * database name and exception translator.
 * <p/>
 * Not intended to be used directly.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @param <C> Client type.
 * @since 3.0
 * @see SimpleMongoClientDatabaseFactory
 */
public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFactory {

	private final C mongoClient;
	private final String databaseName;
	private final boolean mongoInstanceCreated;
	private final PersistenceExceptionTranslator exceptionTranslator;

	private @Nullable WriteConcern writeConcern;

	/**
	 * Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName},
	 * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
	 *
	 * @param mongoClient must not be {@literal null}.
	 * @param databaseName must not be {@literal null} or empty.
	 * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
	 *          {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}.
	 * @param exceptionTranslator must not be {@literal null}.
	 */
	protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
			PersistenceExceptionTranslator exceptionTranslator) {

		Assert.notNull(mongoClient, "MongoClient must not be null!");
		Assert.hasText(databaseName, "Database name must not be empty!");
		Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
				"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");

		this.mongoClient = mongoClient;
		this.databaseName = databaseName;
		this.mongoInstanceCreated = mongoInstanceCreated;
		this.exceptionTranslator = exceptionTranslator;
	}

	/**
	 * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
	 *
	 * @param writeConcern the writeConcern to set
	 */
	public void setWriteConcern(WriteConcern writeConcern) {
		this.writeConcern = writeConcern;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
	 */
	public MongoDatabase getMongoDatabase() throws DataAccessException {
		return getMongoDatabase(getDefaultDatabaseName());
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
	 */
	@Override
	public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {

		Assert.hasText(dbName, "Database name must not be empty!");

		MongoDatabase db = doGetMongoDatabase(dbName);

		if (writeConcern == null) {
			return db;
		}

		return db.withWriteConcern(writeConcern);
	}

	/**
	 * Get the actual {@link MongoDatabase} from the client.
	 *
	 * @param dbName must not be {@literal null} or empty.
	 * @return
	 */
	protected abstract MongoDatabase doGetMongoDatabase(String dbName);

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.DisposableBean#destroy()
	 */
	public void destroy() throws Exception {
		if (mongoInstanceCreated) {
			closeClient();
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
	 */
	public PersistenceExceptionTranslator getExceptionTranslator() {
		return this.exceptionTranslator;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
	 */
	public MongoDatabaseFactory withSession(ClientSession session) {
		return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
	}

	/**
	 * Close the client instance.
	 */
	protected abstract void closeClient();

	/**
	 * @return the Mongo client object.
	 */
	protected C getMongoClient() {
		return mongoClient;
	}

	/**
	 * @return the database name.
	 */
	protected String getDefaultDatabaseName() {
		return databaseName;
	}

	/**
	 * {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a
	 * {@link SessionAwareMethodInterceptor}.
	 *
	 * @author Christoph Strobl
	 * @since 2.1
	 */
	@Value
	static class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {

		ClientSession session;
		MongoDatabaseFactory delegate;

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
		 */
		@Override
		public MongoDatabase getMongoDatabase() throws DataAccessException {
			return proxyMongoDatabase(delegate.getMongoDatabase());
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
		 */
		@Override
		public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
			return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
		 */
		@Override
		public PersistenceExceptionTranslator getExceptionTranslator() {
			return delegate.getExceptionTranslator();
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
		 */
		@Override
		public ClientSession getSession(ClientSessionOptions options) {
			return delegate.getSession(options);
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
		 */
		@Override
		public MongoDatabaseFactory withSession(ClientSession session) {
			return delegate.withSession(session);
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
		 */
		@Override
		public boolean isTransactionActive() {
			return session != null && session.hasActiveTransaction();
		}

		private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
			return createProxyInstance(session, database, MongoDatabase.class);
		}

		private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
			return createProxyInstance(session, database, MongoDatabase.class);
		}

		private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
				MongoCollection<?> collection) {
			return createProxyInstance(session, collection, MongoCollection.class);
		}

		private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {

			ProxyFactory factory = new ProxyFactory();
			factory.setTarget(target);
			factory.setInterfaces(targetType);
			factory.setOpaque(true);

			factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
					this::proxyDatabase, MongoCollection.class, this::proxyCollection));

			return targetType.cast(factory.getProxy());
		}
	}
}
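For orientation, a short usage sketch of the concrete subclass this base class points to via its @see tag; the connection string and database names are placeholders, and the constructor shown is the SimpleMongoClientDatabaseFactory(MongoClient, String) variant on the 3.0 side of this compare:

	// SimpleMongoClientDatabaseFactory supplies doGetMongoDatabase(..) and closeClient() for a
	// com.mongodb.client.MongoClient. Constructed around an existing client the factory does not
	// own it, so destroy() leaves the client open.
	MongoClient client = MongoClients.create("mongodb://localhost:27017");
	MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, "orders");

	MongoDatabase defaultDb = factory.getMongoDatabase(); // default database, write concern applied if configured
	MongoDatabase reporting = factory.getMongoDatabase("reporting"); // explicit database name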

@@ -15,11 +15,26 @@
 */
package org.springframework.data.mongodb.core;

import lombok.Value;

import org.springframework.aop.framework.ProxyFactory;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

import com.mongodb.ClientSessionOptions;
import com.mongodb.DB;
import com.mongodb.WriteConcern;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;

/**
 * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
 * database name and exception translator.
 * Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
 * defining common properties such as database name and exception translator.
 * <p/>
 * Not intended to be used directly.
 *
@@ -27,11 +42,17 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
 * @author Mark Paluch
 * @param <C> Client type.
 * @since 2.1
 * @see SimpleMongoClientDatabaseFactory
 * @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
 * @see SimpleMongoDbFactory
 * @see SimpleMongoClientDbFactory
 */
@Deprecated
public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {

	private final C mongoClient;
	private final String databaseName;
	private final boolean mongoInstanceCreated;
	private final PersistenceExceptionTranslator exceptionTranslator;

	private @Nullable WriteConcern writeConcern;

	/**
	 * Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
@@ -45,6 +66,207 @@ public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySuppo
	 */
	protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
			PersistenceExceptionTranslator exceptionTranslator) {
		super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);

		Assert.notNull(mongoClient, "MongoClient must not be null!");
		Assert.hasText(databaseName, "Database name must not be empty!");
		Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
				"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");

		this.mongoClient = mongoClient;
		this.databaseName = databaseName;
		this.mongoInstanceCreated = mongoInstanceCreated;
		this.exceptionTranslator = exceptionTranslator;
	}

	/**
	 * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
	 *
	 * @param writeConcern the writeConcern to set
	 */
	public void setWriteConcern(WriteConcern writeConcern) {
		this.writeConcern = writeConcern;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getDb()
	 */
	public MongoDatabase getDb() throws DataAccessException {
		return getDb(databaseName);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
	 */
	@Override
	public MongoDatabase getDb(String dbName) throws DataAccessException {

		Assert.hasText(dbName, "Database name must not be empty!");

		MongoDatabase db = doGetMongoDatabase(dbName);

		if (writeConcern == null) {
			return db;
		}

		return db.withWriteConcern(writeConcern);
	}

	/**
	 * Get the actual {@link MongoDatabase} from the client.
	 *
	 * @param dbName must not be {@literal null} or empty.
	 * @return
	 */
	protected abstract MongoDatabase doGetMongoDatabase(String dbName);

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.DisposableBean#destroy()
	 */
	public void destroy() throws Exception {
		if (mongoInstanceCreated) {
			closeClient();
		}
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
	 */
	public PersistenceExceptionTranslator getExceptionTranslator() {
		return this.exceptionTranslator;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
	 */
	public MongoDbFactory withSession(ClientSession session) {
		return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
	}

	/**
	 * Close the client instance.
	 */
	protected abstract void closeClient();

	/**
	 * @return the Mongo client object.
	 */
	protected C getMongoClient() {
		return mongoClient;
	}

	/**
	 * @return the database name.
	 */
	protected String getDefaultDatabaseName() {
		return databaseName;
	}

	/**
	 * {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
	 * {@link SessionAwareMethodInterceptor}.
	 *
	 * @author Christoph Strobl
	 * @since 2.1
	 */
	@Value
	static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {

		ClientSession session;
		MongoDbFactory delegate;

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getDb()
		 */
		@Override
		public MongoDatabase getDb() throws DataAccessException {
			return proxyMongoDatabase(delegate.getDb());
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
		 */
		@Override
		public MongoDatabase getDb(String dbName) throws DataAccessException {
			return proxyMongoDatabase(delegate.getDb(dbName));
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
		 */
		@Override
		public PersistenceExceptionTranslator getExceptionTranslator() {
			return delegate.getExceptionTranslator();
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
		 */
		@Override
		public DB getLegacyDb() {
			return delegate.getLegacyDb();
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
		 */
		@Override
		public ClientSession getSession(ClientSessionOptions options) {
			return delegate.getSession(options);
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
		 */
		@Override
		public MongoDbFactory withSession(ClientSession session) {
			return delegate.withSession(session);
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
		 */
		@Override
		public boolean isTransactionActive() {
			return session != null && session.hasActiveTransaction();
		}

		private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
			return createProxyInstance(session, database, MongoDatabase.class);
		}

		private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
			return createProxyInstance(session, database, MongoDatabase.class);
		}

		private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
				MongoCollection<?> collection) {
			return createProxyInstance(session, collection, MongoCollection.class);
		}

		private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {

			ProxyFactory factory = new ProxyFactory();
			factory.setTarget(target);
			factory.setInterfaces(targetType);
			factory.setOpaque(true);

			factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
					this::proxyDatabase, MongoCollection.class, this::proxyCollection));

			return targetType.cast(factory.getProxy(target.getClass().getClassLoader()));
		}
	}

}

@@ -1,120 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.Collections;
import java.util.Map;

import org.bson.BsonDocument;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.lang.Nullable;

import com.mongodb.AutoEncryptionSettings;
import com.mongodb.MongoClientSettings;

/**
 * {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}.
 *
 * @author Christoph Strobl
 * @since 2.2
 */
public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncryptionSettings> {

	private boolean bypassAutoEncryption;
	private String keyVaultNamespace;
	private Map<String, Object> extraOptions;
	private MongoClientSettings keyVaultClientSettings;
	private Map<String, Map<String, Object>> kmsProviders;
	private Map<String, BsonDocument> schemaMap;

	/**
	 * @param bypassAutoEncryption
	 * @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean)
	 */
	public void setBypassAutoEncryption(boolean bypassAutoEncryption) {
		this.bypassAutoEncryption = bypassAutoEncryption;
	}

	/**
	 * @param extraOptions
	 * @see AutoEncryptionSettings.Builder#extraOptions(Map)
	 */
	public void setExtraOptions(Map<String, Object> extraOptions) {
		this.extraOptions = extraOptions;
	}

	/**
	 * @param keyVaultNamespace
	 * @see AutoEncryptionSettings.Builder#keyVaultNamespace(String)
	 */
	public void setKeyVaultNamespace(String keyVaultNamespace) {
		this.keyVaultNamespace = keyVaultNamespace;
	}

	/**
	 * @param keyVaultClientSettings
	 * @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings)
	 */
	public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) {
		this.keyVaultClientSettings = keyVaultClientSettings;
	}

	/**
	 * @param kmsProviders
	 * @see AutoEncryptionSettings.Builder#kmsProviders(Map)
	 */
	public void setKmsProviders(Map<String, Map<String, Object>> kmsProviders) {
		this.kmsProviders = kmsProviders;
	}

	/**
	 * @param schemaMap
	 * @see AutoEncryptionSettings.Builder#schemaMap(Map)
	 */
	public void setSchemaMap(Map<String, BsonDocument> schemaMap) {
		this.schemaMap = schemaMap;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.FactoryBean#getObject()
	 */
	@Override
	public AutoEncryptionSettings getObject() {

		return AutoEncryptionSettings.builder() //
				.bypassAutoEncryption(bypassAutoEncryption) //
				.keyVaultNamespace(keyVaultNamespace) //
				.keyVaultMongoClientSettings(keyVaultClientSettings) //
				.kmsProviders(orEmpty(kmsProviders)) //
				.extraOptions(orEmpty(extraOptions)) //
				.schemaMap(orEmpty(schemaMap)) //
				.build();
	}

	private <K, V> Map<K, V> orEmpty(@Nullable Map<K, V> source) {
		return source != null ? source : Collections.emptyMap();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.FactoryBean#getObjectType()
	 */
	@Override
	public Class<?> getObjectType() {
		return AutoEncryptionSettings.class;
	}
}
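A hedged configuration sketch for the factory bean above; localMasterKey (a 96-byte local master key) and personSchema (a pre-built BsonDocument) are placeholders, and the namespace value is only an example:

	MongoEncryptionSettingsFactoryBean encryption = new MongoEncryptionSettingsFactoryBean();
	encryption.setKeyVaultNamespace("encryption.__keyVault");

	Map<String, Object> localKms = new HashMap<>();
	localKms.put("key", localMasterKey); // byte[96] placeholder
	encryption.setKmsProviders(Collections.singletonMap("local", localKms));
	encryption.setSchemaMap(Collections.singletonMap("db.people", personSchema));

	AutoEncryptionSettings autoEncryption = encryption.getObject();
	// The result is what MongoClientSettingsFactoryBean#setAutoEncryptionSettings(..) expects.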

@@ -29,6 +29,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.dao.InvalidDataAccessResourceUsageException;
import org.springframework.dao.PermissionDeniedDataAccessException;
import org.springframework.dao.support.PersistenceExceptionTranslator;
import org.springframework.data.mongodb.BulkOperationException;
import org.springframework.data.mongodb.ClientSessionException;
import org.springframework.data.mongodb.MongoTransactionException;
import org.springframework.data.mongodb.UncategorizedMongoDbException;
@@ -36,6 +37,7 @@ import org.springframework.data.mongodb.util.MongoDbErrorCodes;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

import com.mongodb.BulkWriteException;
import com.mongodb.MongoBulkWriteException;
import com.mongodb.MongoException;
import com.mongodb.MongoServerException;
@@ -110,6 +112,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
			return new DataIntegrityViolationException(ex.getMessage(), ex);
		}

		if (ex instanceof BulkWriteException) {
			return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
		}

		// All other MongoExceptions
		if (ex instanceof MongoException) {

@@ -129,7 +135,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
			} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
				return new MongoTransactionException(ex.getMessage(), ex);
			}

			return new UncategorizedMongoDbException(ex.getMessage(), ex);
		}

@@ -1,75 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
import org.springframework.util.Assert;

/**
 * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the
 * following mapping rules.
 * <p>
 * <strong>Required Properties</strong>
 * <ul>
 * <li>Properties of primitive type</li>
 * </ul>
 * <strong>Ignored Properties</strong>
 * <ul>
 * <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
 * </ul>
 * <strong>Property Type Mapping</strong>
 * <ul>
 * <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
 * <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
 * <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
 * <li>{@link java.util.Map} -> {@code type : 'object'}</li>
 * <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
 * <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
 * <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
 * </ul>
 * <br />
 * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
 * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
 * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
 * </p>
 *
 * @author Christoph Strobl
 * @since 2.2
 */
public interface MongoJsonSchemaCreator {

	/**
	 * Create the {@link MongoJsonSchema} for the given {@link Class type}.
	 *
	 * @param type must not be {@literal null}.
	 * @return never {@literal null}.
	 */
	MongoJsonSchema createSchemaFor(Class<?> type);

	/**
	 * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
	 * {@link MongoConverter}.
	 *
	 * @param mongoConverter must not be {@literal null}.
	 * @return new instance of {@link MongoJsonSchemaCreator}.
	 */
	static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {

		Assert.notNull(mongoConverter, "MongoConverter must not be null!");
		return new MappingMongoJsonSchemaCreator(mongoConverter);
	}
}
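A short usage sketch for the interface above; Person, converter and template stand in for a mapped domain type, the configured MongoConverter and a MongoTemplate, so treat the names as placeholders:

	MongoJsonSchema schema = MongoJsonSchemaCreator.create(converter) //
			.createSchemaFor(Person.class);

	// e.g. attach the derived $jsonSchema as a validator when creating the collection
	template.createCollection(Person.class, CollectionOptions.empty().schema(schema));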

@@ -27,7 +27,6 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoConverter;
@@ -41,13 +40,13 @@ import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.util.CloseableIterator;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;

import com.mongodb.ClientSessionOptions;
import com.mongodb.Cursor;
import com.mongodb.ReadPreference;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoCollection;
@@ -223,10 +222,9 @@ public interface MongoOperations extends FluentMongoOperations {

	/**
	 * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
	 * {@link com.mongodb.client.FindIterable}.
	 * {@link Cursor}.
	 * <p>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
	 * be closed.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
	 *
	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
	 *          specification. Must not be {@literal null}.
@@ -239,10 +237,9 @@ public interface MongoOperations extends FluentMongoOperations {

	/**
	 * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
	 * by a Mongo DB {@link com.mongodb.client.FindIterable}.
	 * by a Mongo DB {@link Cursor}.
	 * <p>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
	 * be closed.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
	 *
	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
	 *          specification. Must not be {@literal null}.
@@ -361,13 +358,11 @@
	IndexOperations indexOps(Class<?> entityClass);

	/**
	 * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level.
	 * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level.
	 *
	 * @return
	 * @since 1.7
	 * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
	 */
	@Deprecated
	ScriptOperations scriptOps();

	/**
@@ -439,11 +434,7 @@
	 *          reduce function.
	 * @param entityClass The parametrized type of the returned list
	 * @return The results of the group operation
	 * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
	 *             Please use {@link #aggregate(TypedAggregation, String, Class) } with a
	 *             {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
	 */
	@Deprecated
	<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);

	/**
@@ -458,12 +449,7 @@
	 *          reduce function.
	 * @param entityClass The parametrized type of the returned list
	 * @return The results of the group operation
	 * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
	 *             Please use {@link #aggregate(TypedAggregation, String, Class) } with a
	 *             {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
	 *             {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
	 */
	@Deprecated
	<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
			Class<T> entityClass);

@@ -519,11 +505,11 @@
	<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);

	/**
	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
	 * Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
	 * <p>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
	 * needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
	 * derived from the inputType of the aggregation.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
	 * results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of
	 * the aggregation.
	 * <p>
	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
	 * explanation mode will throw an {@link IllegalArgumentException}.
@@ -538,11 +524,11 @@
	<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);

	/**
	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
	 * Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
	 * <p/>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
	 * needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
	 * of the inputCollection is derived from the inputType of the aggregation.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
	 * results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is
	 * derived from the inputType of the aggregation.
	 * <p/>
	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
	 * explanation mode will throw an {@link IllegalArgumentException}.
@@ -556,10 +542,10 @@
	<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);

	/**
	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
	 * Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
	 * <p/>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
	 * needs to be closed. The raw results will be mapped to the given entity class.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
	 * results will be mapped to the given entity class.
	 * <p/>
	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
	 * explanation mode will throw an {@link IllegalArgumentException}.
@@ -575,10 +561,10 @@
	<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);

	/**
	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
	 * Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
	 * <p/>
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
	 * needs to be closed. The raw results will be mapped to the given entity class.
	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
	 * results will be mapped to the given entity class.
	 * <p/>
	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
	 * explanation mode will throw an {@link IllegalArgumentException}.
@@ -651,52 +637,24 @@
	 * information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
	 * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
	 * results.
	 * <p>
	 * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
	 * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
	 * aggregations directly:
	 * </p>
	 *
	 * <pre class="code">
	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
	 * </pre>
	 *
	 * @param near must not be {@literal null}.
	 * @param entityClass must not be {@literal null}.
	 * @return
	 * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
	 *             {@link Aggregation#geoNear(NearQuery, String)} instead.
	 */
	@Deprecated
	<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);

	/**
	 * Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
	 * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
	 * particular number of results.
	 * <p>
	 * MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
	 * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
	 * aggregations directly:
	 * </p>
	 *
	 * <pre class="code">
	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
	 * </pre>
	 *
	 * @param near must not be {@literal null}.
	 * @param entityClass must not be {@literal null}.
	 * @param collectionName the collection to trigger the query against. If no collection name is given the entity class
	 *          will be inspected. Must not be {@literal null} nor empty.
	 * @return
	 * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
	 *             {@link Aggregation#geoNear(NearQuery, String)} instead.
	 */
	@Deprecated
	<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName);

	/**
@@ -886,15 +844,12 @@
	 *
	 * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
	 *          fields specification. Must not be {@literal null}.
	 * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
	 * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
	 * @param entityClass the parametrized type. Must not be {@literal null}.
	 * @return the converted object that was updated before it was updated or {@literal null}, if not found.
	 * @since 3.0
	 * @see Update
	 * @see AggregationUpdate
	 */
	@Nullable
	<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
	<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -902,16 +857,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object as it was before the update or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -920,18 +872,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
@@ -940,19 +889,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
|
||||
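A short usage sketch of the options-based variant above (illustrative only; the Person type, the "people" collection, and the template variable are assumptions, not part of this interface):

Query query = Query.query(Criteria.where("firstName").is("Ada"));
Update update = new Update().inc("visits", 1);

// returnNew(true) returns the document as it is after the update instead of the previous state.
Person updated = template.findAndModify(query, update,
        FindAndModifyOptions.options().returnNew(true), Person.class, "people");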
/**
|
||||
@@ -1155,11 +1101,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1171,11 +1112,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1186,11 +1123,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1208,8 +1141,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1270,8 +1203,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1301,111 +1234,99 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
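As a hedged example of the upsert variants above, assuming an existing MongoTemplate named template and an illustrative "counters" collection:

Query query = Query.query(Criteria.where("_id").is("pageViews"));
Update update = new Update().inc("value", 1);

// Inserts { _id: "pageViews", value: 1 } on first use and increments the counter afterwards.
UpdateResult result = template.upsert(query, update, Document.class, "counters");
System.out.println("matched=" + result.getMatchedCount() + ", upsertedId=" + result.getUpsertedId());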
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1413,34 +1334,27 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1448,22 +1362,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
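For illustration, a multi-document update sketch (the Person type, the "people" collection, and the template variable are assumptions):

Query query = Query.query(Criteria.where("lastSeen").lt(LocalDate.of(2019, 1, 1)));
Update update = new Update().set("active", false);

// Applies the update to every matching document, not just the first one.
UpdateResult result = template.updateMulti(query, update, Person.class, "people");
System.out.println("modified " + result.getModifiedCount() + " documents");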
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Remove the given object from the collection by id.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
|
||||
@@ -1471,10 +1379,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
DeleteResult remove(Object object);
|
||||
|
||||
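A minimal sketch of the id-based removal described above, assuming a previously loaded Person entity and a MongoTemplate named template:

Person person = template.findById(personId, Person.class); // personId is a placeholder

if (person != null) {
    DeleteResult result = template.remove(person);
    System.out.println("deleted " + result.getDeletedCount() + " document(s)");
}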
/**
|
||||
* Removes the given object from the given collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Removes the given object from the given collection.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,763 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
|
||||
/**
|
||||
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
|
||||
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
|
||||
* for {@literal count}, {@literal remove}, and other methods.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class QueryOperations {
|
||||
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final EntityOperations entityOperations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
private final CodecRegistryProvider codecRegistryProvider;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final AggregationUtil aggregationUtil;
|
||||
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link QueryOperations}.
|
||||
*
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param updateMapper must not be {@literal null}.
|
||||
* @param entityOperations must not be {@literal null}.
|
||||
* @param propertyOperations must not be {@literal null}.
|
||||
* @param codecRegistryProvider must not be {@literal null}.
|
||||
*/
|
||||
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
|
||||
PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.entityOperations = entityOperations;
|
||||
this.propertyOperations = propertyOperations;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.mappingContext = queryMapper.getMappingContext();
|
||||
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
QueryContext createQueryContext(Query query) {
|
||||
return new QueryContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DistinctQueryContext}.
|
||||
*/
|
||||
DistinctQueryContext distinctQueryContext(Query query, String fieldName) {
|
||||
return new DistinctQueryContext(query, fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link CountContext}.
|
||||
*/
|
||||
CountContext countQueryContext(Query query) {
|
||||
return new CountContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting multiple documents.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, true, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param replacement the {@link MappedDocument mapped replacement} document.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
|
||||
return new UpdateContext(replacement, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing all matching documents.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteQueryContext(Query query) {
|
||||
return new DeleteContext(query, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing only the first matching document.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteSingleContext(Query query) {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
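// Usage sketch (illustrative, not part of the original file): inside MongoTemplate a count call may
// roughly flow through these contexts as follows, assuming "queryOperations" is this instance and
// "entity" the resolved MongoPersistentEntity for the domain type:
//
//   CountContext countContext = queryOperations.countQueryContext(query);
//   Document filter = countContext.getMappedQuery(entity);
//   CountOptions options = countContext.getCountOptions(domainType);
//   long count = collection.countDocuments(filter, options);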
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class QueryContext {
|
||||
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance from the given {@literal query} (which can be either a {@link Query}
* or a plain {@link Document}).
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
private QueryContext(@Nullable Query query) {
|
||||
this.query = query != null ? query : new Query();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Query getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document getQueryObject() {
|
||||
return query.getQueryObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the
|
||||
* given{@literal domainType}
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable Class<T> domainType,
|
||||
Function<Class<T>, MongoPersistentEntity<?>> entityLookup) {
|
||||
return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> entity) {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
|
||||
} else {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields,
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getSortObject() sort} option.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the {@link com.mongodb.client.model.Collation} if present extracted from the {@link Query} or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param consumer must not be {@literal null}.
|
||||
*/
|
||||
void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
|
||||
getCollation(domainType).ifPresent(consumer::accept);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
|
||||
|
||||
return entityOperations.forType(domainType).getCollation(query) //
|
||||
.map(Collation::toMongoCollation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DistinctQueryContext extends QueryContext {
|
||||
|
||||
private final String fieldName;
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param fieldName must not be {@literal null}.
|
||||
*/
|
||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||
|
||||
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapped field name to project to.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return getMappedFields(entity).keySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the MongoDB native representation of the given {@literal type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
<T> Class<T> getDriverCompatibleClass(Class<T> type) {
|
||||
|
||||
return codecRegistryProvider.getCodecFor(type) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the most specific read target type based on the user {@literal requestedTargetType} and the property type
* derived from meta information extracted from the {@literal domainType}.
|
||||
*
|
||||
* @param requestedTargetType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
|
||||
|
||||
Class<?> conversionTargetType = requestedTargetType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor UserType over property one
|
||||
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class CountContext extends QueryContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
CountContext(@Nullable Query query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType) {
|
||||
return getCountOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
|
||||
|
||||
CountOptions options = new CountOptions();
|
||||
Query query = getQuery();
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
|
||||
String hint = query.getHint();
|
||||
if (BsonUtils.isJsonDocument(hint)) {
|
||||
options.hint(BsonUtils.parse(hint, codecRegistryProvider));
|
||||
} else {
|
||||
options.hintString(hint);
|
||||
}
|
||||
}
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DeleteContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
|
||||
*/
|
||||
DeleteContext(@Nullable Query query, boolean multi) {
|
||||
|
||||
super(query);
|
||||
this.multi = multi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
|
||||
return getDeleteOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents shall be deleted.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
|
||||
*/
|
||||
class UpdateContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
private final boolean upsert;
|
||||
private final @Nullable UpdateDefinition update;
|
||||
private final @Nullable MappedDocument mappedDocument;
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
|
||||
this(update, new BasicQuery(query), multi, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) {
|
||||
|
||||
super(query);
|
||||
|
||||
this.multi = multi;
|
||||
this.upsert = upsert;
|
||||
this.update = update;
|
||||
this.mappedDocument = null;
|
||||
}
|
||||
|
||||
UpdateContext(MappedDocument update, boolean upsert) {
|
||||
|
||||
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
|
||||
this.multi = false;
|
||||
this.upsert = upsert;
|
||||
this.mappedDocument = update;
|
||||
this.update = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType) {
|
||||
return getUpdateOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType, @Nullable Consumer<UpdateOptions> callback) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update != null && update.hasArrayFilters()) {
|
||||
options
|
||||
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
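// Illustrative note (not part of the original file): given an update such as
//   new Update().set("grades.$[g]", 100).filterArray(Criteria.where("g").lt(60))
// the options produced above carry arrayFilters = [ { "g" : { "$lt" : 60 } } ] together with the
// upsert flag and any collation resolved for the domain type.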
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType) {
|
||||
return getReplaceOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType, @Nullable Consumer<ReplaceOptions> callback) {
|
||||
|
||||
UpdateOptions updateOptions = getUpdateOptions(domainType);
|
||||
|
||||
ReplaceOptions options = new ReplaceOptions();
|
||||
options.collation(updateOptions.getCollation());
|
||||
options.upsert(updateOptions.isUpsert());
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> domainType) {
|
||||
|
||||
Document mappedQuery = super.getMappedQuery(domainType);
|
||||
|
||||
if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
|
||||
mappedQuery.put("$isolated", 1);
|
||||
}
|
||||
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {
|
||||
|
||||
Document shardKeySource = existing != null ? existing
|
||||
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
|
||||
|
||||
Document filterWithShardKey = new Document(filter);
|
||||
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
|
||||
|
||||
return filterWithShardKey;
|
||||
}
|
||||
|
||||
boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {
|
||||
|
||||
return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
|
||||
&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entity's
|
||||
* {@literal id} property.
|
||||
* @since 3.0
|
||||
*/
|
||||
private boolean shardedById(MongoPersistentEntity<?> domainType) {
|
||||
|
||||
ShardKey shardKey = domainType.getShardKey();
|
||||
if (shardKey.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String key = shardKey.getPropertyNames().iterator().next();
|
||||
if ("_id".equals(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = domainType.getIdProperty();
|
||||
return idProperty != null && idProperty.getName().equals(key);
|
||||
}
|
||||
|
||||
Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
|
||||
return getMappedShardKey(entity).keySet();
|
||||
}
|
||||
|
||||
Document getMappedShardKey(MongoPersistentEntity<?> entity) {
|
||||
return mappedShardKey.computeIfAbsent(entity.getType(),
|
||||
key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
|
||||
|
||||
AggregationOperationContext context = domainType != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped update {@link Document}.
|
||||
*
|
||||
* @param entity the entity to map field names against. Can be {@literal null}.
* @return the mapped update document. Never {@literal null}.
|
||||
*/
|
||||
Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (update != null) {
|
||||
return update instanceof MappedUpdate ? update.getUpdateObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
}
|
||||
return mappedDocument.getDocument();
|
||||
}
|
||||
|
||||
/**
|
||||
* Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not
|
||||
* already done in the actual {@link UpdateDefinition}
|
||||
*
|
||||
* @param persistentEntity can be {@literal null}.
|
||||
*/
|
||||
void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
|
||||
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the update holds an aggregation pipeline.
|
||||
*/
|
||||
boolean isAggregationUpdate() {
|
||||
return update instanceof AggregationUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents should be updated.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,200 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> operations in a fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping a potentially given
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the
|
||||
* originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}.
|
||||
* However, it is possible to define a different {@literal returnType} via {@code as}.<br />
* The collection to operate on is optional, in which case all collections within the actual database are watched; use
* {@literal watchCollection} to define a fixed collection.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* changeStream(Jedi.class)
|
||||
* .watchCollection("star-wars")
|
||||
* .filter(where("operationType").is("insert"))
|
||||
* .resumeAt(Instant.now())
|
||||
* .listen();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface ReactiveChangeStreamOperation {
|
||||
|
||||
/**
|
||||
* Start creating a change stream operation for the given {@literal domainType} watching all collections within the
|
||||
* database. <br />
|
||||
* Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or
|
||||
* {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements.
|
||||
* @return new instance of {@link ReactiveChangeStream}. Never {@literal null}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> ReactiveChangeStream<T> changeStream(Class<T> domainType);
|
||||
|
||||
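A usage sketch under the assumption that a ReactiveMongoTemplate named template and a Person domain type exist (both illustrative):

Flux<ChangeStreamEvent<Person>> events = template.changeStream(Person.class)
        .watchCollection("people")
        .filter(Criteria.where("operationType").is("insert"))
        .listen();

// Emits the mapped Person body of every insert event until the subscription is cancelled.
events.map(ChangeStreamEvent::getBody).subscribe(System.out::println);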
/**
|
||||
* Compose change stream execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections or databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (optional).
|
||||
*/
|
||||
interface ChangeStreamWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to watch.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code collection} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(String collection);
|
||||
|
||||
/**
|
||||
* Set the the collection to watch. Collection name is derived from the {@link Class entityClass}.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code entityClass} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a filter for limiting results (optional).
|
||||
*/
|
||||
interface ChangeStreamWithFilterAndProjection<T> extends ResumingChangeStream<T>, TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to filter matching events.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(Aggregation by);
|
||||
|
||||
/**
|
||||
* Use a {@link CriteriaDefinition critera} to filter matching events via an
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by);
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume a change stream. (optional).
|
||||
*/
|
||||
interface ResumingChangeStream<T> extends TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Resume the change stream at a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(Instant)
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp)
|
||||
* @throws IllegalArgumentException if the given beacon is neither {@link Instant} nor {@link BsonTimestamp}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAt(Object token);
|
||||
|
||||
/**
|
||||
* Resume the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue)
|
||||
* @see ChangeStreamOptionsBuilder#resumeToken(BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAfter(Object token);
|
||||
|
||||
/**
|
||||
* Start the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#startAfter(BsonValue) (BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> startAfter(Object token);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide some options.
|
||||
*/
|
||||
interface ChangeStreamWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously
|
||||
* defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the
|
||||
* builder and can be overwritten via eg. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}.
|
||||
*
|
||||
* @param optionsConsumer never {@literal null}.
|
||||
* @return new instance of {@link ReactiveChangeStream}.
|
||||
*/
|
||||
ReactiveChangeStream<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way.
|
||||
*/
|
||||
interface ReactiveChangeStream<T> extends ChangeStreamWithOptions<T>, ChangeStreamWithCollection<T>,
|
||||
TerminatingChangeStream<T>, ResumingChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {}
|
||||
}
|
||||
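For context, a hedged usage sketch of the fluent API above (not part of the diff): `Jedi` is a placeholder domain type, `"star-wars"` a placeholder collection, and `ReactiveMongoTemplate` is assumed to expose this interface via `ReactiveFluentMongoOperations`, as one side of a later hunk suggests.

```java
// Hedged sketch: consume the reactive change stream fluent API from a template.
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.time.Instant;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.Disposable;

class ChangeStreamUsageSketch {

    static class Jedi {} // placeholder domain type

    Disposable watchInserts(ReactiveMongoTemplate template) {

        return template.changeStream(Jedi.class) // map events against Jedi
                .watchCollection("star-wars") // omit to watch the whole database
                .filter(where("operationType").is("insert")) // criteria-based $match filter
                .resumeAt(Instant.now()) // only events from now on
                .listen() // Flux<ChangeStreamEvent<Jedi>>
                .subscribe(event -> System.out.println(event.getBody()));
    }
}
```

As the Javadoc notes, the returned Flux only completes when the subscription is cancelled, so the caller decides how long to hold on to the Disposable or how to compose the stream into a longer-lived pipeline.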
@@ -1,230 +0,0 @@
/*
 * Copyright 2019-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import reactor.core.publisher.Flux;

import java.time.Instant;
import java.util.List;
import java.util.function.Consumer;

import org.bson.BsonTimestamp;
import org.bson.BsonValue;
import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;

/**
 * @author Christoph Strobl
 * @since 2.2
 */
class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation {

    private final ReactiveMongoTemplate template;

    /**
     * @param template must not be {@literal null}.
     */
    ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) {
        this.template = template;
    }

    /*
     * (non-Javadoc)
     * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class)
     */
    @Override
    public <T> ReactiveChangeStream<T> changeStream(Class<T> domainType) {

        Assert.notNull(domainType, "DomainType must not be null!");
        return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null);
    }

    static class ReactiveChangeStreamSupport<T>
            implements ReactiveChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {

        private final ReactiveMongoTemplate template;
        private final Class<?> domainType;
        private final Class<T> returnType;
        private final @Nullable String collection;
        private final @Nullable ChangeStreamOptions options;

        private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
                @Nullable String collection, @Nullable ChangeStreamOptions options) {

            this.template = template;
            this.domainType = domainType;
            this.returnType = returnType;
            this.collection = collection;
            this.options = options;
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.String)
         */
        @Override
        public ChangeStreamWithFilterAndProjection<T> watchCollection(String collection) {

            Assert.hasText(collection, "Collection name must not be null nor empty!");

            return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options);
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.Class)
         */
        @Override
        public ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass) {

            Assert.notNull(entityClass, "Collection type must not be null!");

            return watchCollection(template.getCollectionName(entityClass));
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAt(java.lang.Object)
         */
        @Override
        public TerminatingChangeStream<T> resumeAt(Object token) {

            return withOptions(builder -> {

                if (token instanceof Instant) {
                    builder.resumeAt((Instant) token);
                } else if (token instanceof BsonTimestamp) {
                    builder.resumeAt((BsonTimestamp) token);
                }
            });
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAfter(java.lang.Object)
         */
        @Override
        public TerminatingChangeStream<T> resumeAfter(Object token) {

            Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");

            return withOptions(builder -> builder.resumeAfter((BsonValue) token));
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#startAfter(java.lang.Object)
         */
        @Override
        public TerminatingChangeStream<T> startAfter(Object token) {

            Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");

            return withOptions(builder -> builder.startAfter((BsonValue) token));
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithOptions#withOptions(java.util.function.Consumer)
         */
        @Override
        public ReactiveChangeStreamSupport<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer) {

            ChangeStreamOptionsBuilder builder = initOptionsBuilder();
            optionsConsumer.accept(builder);

            return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build());
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithProjection#as(java.lang.Class)
         */
        @Override
        public <R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType) {

            Assert.notNull(resultType, "ResultType must not be null!");

            return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options);
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.aggregation.Aggregation)
         */
        @Override
        public ChangeStreamWithFilterAndProjection<T> filter(Aggregation filter) {
            return withOptions(builder -> builder.filter(filter));
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.query.CriteriaDefinition)
         */
        @Override
        public ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by) {

            MatchOperation $match = Aggregation.match(by);
            Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match)
                    : Aggregation.newAggregation($match);
            return filter(aggregation);
        }

        /*
         * (non-Javadoc)
         * @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.TerminatingChangeStream#listen()
         */
        @Override
        public Flux<ChangeStreamEvent<T>> listen() {
            return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType);
        }

        private ChangeStreamOptionsBuilder initOptionsBuilder() {

            // carry previously defined options over to a fresh builder so later steps can add or override them
            ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder();
            if (options == null) {
                return builder;
            }

            options.getFilter().ifPresent(it -> {
                if (it instanceof Aggregation) {
                    builder.filter((Aggregation) it);
                } else {
                    builder.filter(((List<Document>) it).toArray(new Document[0]));
                }
            });
            options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup);
            options.getCollation().ifPresent(builder::collation);

            if (options.isResumeAfter()) {
                options.getResumeToken().ifPresent(builder::resumeAfter);
                options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter);
            } else if (options.isStartAfter()) {
                options.getResumeToken().ifPresent(builder::startAfter);
            } else {
                options.getResumeTimestamp().ifPresent(builder::resumeAt);
                options.getResumeBsonTimestamp().ifPresent(builder::resumeAt);
            }

            return builder;
        }
    }
}
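Because every fluent step in this support class funnels through `withOptions(..)` and `initOptionsBuilder()`, options set early in the chain survive later steps. A hedged sketch of that carry-over (collection name and criteria are illustrative, and `FullDocument` comes from the MongoDB driver):

```java
// Hedged sketch: fullDocumentLookup set via withOptions(..) is carried over by
// initOptionsBuilder() when filter(..) and resumeAt(..) later rebuild the options.
import static org.springframework.data.mongodb.core.query.Criteria.where;

import java.time.Instant;

import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import com.mongodb.client.model.changestream.FullDocument;

import reactor.core.publisher.Flux;

class ChangeStreamOptionsCarryOver {

    Flux<ChangeStreamEvent<Document>> listenToUpdates(ReactiveMongoTemplate template, Instant since) {

        return template.changeStream(Document.class) // raw Document events
                .withOptions(builder -> builder.fullDocumentLookup(FullDocument.UPDATE_LOOKUP)) // set once
                .watchCollection("star-wars") // illustrative collection name
                .filter(where("operationType").is("update")) // rebuilds options, lookup is preserved
                .resumeAt(since) // also preserved into the final ChangeStreamOptions
                .listen();
    }
}
```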
@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.geo.GeoResult;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;

@@ -39,14 +38,13 @@ import org.springframework.data.mongodb.core.query.Query;
 * query(Human.class)
 *     .inCollection("star-wars")
 *     .as(Jedi.class)
 *     .matching(where("firstname").is("luke"))
 *     .matching(query(where("firstname").is("luke")))
 *     .all();
 * </code>
 * </pre>
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @author Juergen Zimmermann
 * @since 2.0
 */
public interface ReactiveFindOperation {
@@ -146,18 +144,6 @@ public interface ReactiveFindOperation {
     */
    TerminatingFind<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingFind}.
     * @throws IllegalArgumentException if criteria is {@literal null}.
     * @since 3.0
     */
    default TerminatingFind<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }

    /**
     * Set the filter query for the geoNear execution.
     *
@@ -273,21 +259,9 @@ public interface ReactiveFindOperation {
     *
     * @param query must not be {@literal null}.
     * @return new instance of {@link TerminatingDistinct}.
     * @throws IllegalArgumentException if query is {@literal null}.
     * @throws IllegalArgumentException if resultType is {@literal null}.
     */
    TerminatingDistinct<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingDistinct}.
     * @throws IllegalArgumentException if criteria is {@literal null}.
     * @since 3.0
     */
    default TerminatingDistinct<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}
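The `matching(CriteriaDefinition)` default method shown in this hunk is only sugar for wrapping the criteria in a `Query`. A hedged sketch of the two equivalent call forms (Jedi is again a placeholder domain type, and the overload exists on the side of the diff that carries the `@since 3.0` method):

```java
// Sketch: the CriteriaDefinition overload simply delegates to Query.query(criteria).
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class MatchingShortcutSketch {

    static class Jedi {} // placeholder domain type

    Flux<Jedi> viaCriteria(ReactiveMongoTemplate template) {
        return template.query(Jedi.class) //
                .matching(where("firstname").is("luke")) // CriteriaDefinition shortcut
                .all();
    }

    Flux<Jedi> viaQuery(ReactiveMongoTemplate template) {
        return template.query(Jedi.class) //
                .matching(query(where("firstname").is("luke"))) // explicit Query, same result
                .all();
    }
}
```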

/**
@@ -23,7 +23,6 @@ import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.bson.Document;

import org.springframework.dao.IncorrectResultSizeDataAccessException;
import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
@@ -32,6 +31,8 @@ import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import com.mongodb.reactivestreams.client.FindPublisher;

/**
 * Implementation of {@link ReactiveFindOperation}.
 *
@@ -119,7 +120,12 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
        public Mono<T> first() {

            FindPublisherPreparer preparer = getCursorPreparer(query);
            Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(1));
            Flux<T> result = doFind(new FindPublisherPreparer() {
                @Override
                public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
                    return preparer.prepare(publisher).limit(1);
                }
            });

            return result.next();
        }
@@ -132,7 +138,12 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
        public Mono<T> one() {

            FindPublisherPreparer preparer = getCursorPreparer(query);
            Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(2));
            Flux<T> result = doFind(new FindPublisherPreparer() {
                @Override
                public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
                    return preparer.prepare(publisher).limit(2);
                }
            });

            return result.collectList().flatMap(it -> {

@@ -23,4 +23,4 @@ package org.springframework.data.mongodb.core;
 * @since 2.0
 */
public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation,
        ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {}
        ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {}

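The hunks above prepare the find publisher with `limit(1)` for `first()` and `limit(2)` for `one()`; fetching a second document is exactly what lets `one()` detect a non-unique result. The continuation after `collectList()` is cut off in the diff, so here is a hedged sketch of what such a terminal check typically looks like:

```java
// Hedged sketch of the one()-style terminal check on a Flux already limited to two elements.
import org.springframework.dao.IncorrectResultSizeDataAccessException;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

class OneSemanticsSketch {

    static <T> Mono<T> one(Flux<T> limitedToTwo) {

        return limitedToTwo.collectList().flatMap(it -> {

            if (it.isEmpty()) {
                return Mono.empty(); // no match at all
            }
            if (it.size() > 1) { // second element means the query was not unique
                return Mono.error(new IncorrectResultSizeDataAccessException("Query returned non unique result", 1));
            }
            return Mono.just(it.get(0));
        });
    }
}
```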
@@ -19,7 +19,6 @@ import reactor.core.publisher.Flux;

import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

/**
@@ -147,18 +146,6 @@ public interface ReactiveMapReduceOperation {
     * @throws IllegalArgumentException if query is {@literal null}.
     */
    TerminatingMapReduce<T> matching(Query query);

    /**
     * Set the filter {@link CriteriaDefinition criteria} to be used.
     *
     * @param criteria must not be {@literal null}.
     * @return new instance of {@link TerminatingMapReduce}.
     * @throws IllegalArgumentException if criteria is {@literal null}.
     * @since 3.0
     */
    default TerminatingMapReduce<T> matching(CriteriaDefinition criteria) {
        return matching(Query.query(criteria));
    }
}

/**
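The same `matching(CriteriaDefinition)` shortcut applies to the map/reduce fluent chain. A heavily hedged sketch, assuming the usual map/reduce JavaScript steps of this API; the domain type, field names, and JS functions are illustrative only:

```java
// Hedged sketch of the mapReduce fluent chain using the CriteriaDefinition shortcut.
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class MapReduceMatchingSketch {

    static class ValueObject {} // placeholder domain type

    Flux<ValueObject> sumPerKey(ReactiveMongoTemplate template) {
        return template.mapReduce(ValueObject.class) //
                .map("function() { emit(this.id, this.value); }") // illustrative map function
                .reduce("function(key, values) { return Array.sum(values); }") // illustrative reduce function
                .matching(where("value").gte(0)) // shortcut for matching(query(where(..)))
                .all();
    }
}
```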
@@ -22,7 +22,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
|
||||
@@ -15,17 +15,192 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @deprecated since 3.0 - Use {@link MongoClientSettingsFactoryBean} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends MongoClientSettingsFactoryBean {
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private List<MongoCredential> credentialList = new ArrayList<>();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
|
||||
private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
|
||||
private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
|
||||
private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
|
||||
private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
|
||||
private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern}.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadConcern}.
|
||||
*
|
||||
* @param readConcern
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the List of {@link MongoCredential}s.
|
||||
*
|
||||
* @param credentialList must not be {@literal null}.
|
||||
*/
|
||||
public void setCredentialList(List<MongoCredential> credentialList) {
|
||||
|
||||
Assert.notNull(credentialList, "CredendialList must not be null!");
|
||||
|
||||
this.credentialList.addAll(credentialList);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the {@link MongoCredential} to the list of credentials.
|
||||
*
|
||||
* @param mongoCredential must not be {@literal null}.
|
||||
*/
|
||||
public void addMongoCredential(MongoCredential mongoCredential) {
|
||||
|
||||
Assert.notNull(mongoCredential, "MongoCredential must not be null!");
|
||||
|
||||
this.credentialList.add(mongoCredential);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link StreamFactoryFactory}.
|
||||
*
|
||||
* @param streamFactoryFactory
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link CodecRegistry}.
|
||||
*
|
||||
* @param codecRegistry
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClusterSettings}.
|
||||
*
|
||||
* @param clusterSettings
|
||||
*/
|
||||
public void setClusterSettings(ClusterSettings clusterSettings) {
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SocketSettings}.
|
||||
*
|
||||
* @param socketSettings
|
||||
*/
|
||||
public void setSocketSettings(SocketSettings socketSettings) {
|
||||
this.socketSettings = socketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the heartbeat {@link SocketSettings}.
|
||||
*
|
||||
* @param heartbeatSocketSettings
|
||||
*/
|
||||
public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
|
||||
this.heartbeatSocketSettings = heartbeatSocketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ConnectionPoolSettings}.
|
||||
*
|
||||
* @param connectionPoolSettings
|
||||
*/
|
||||
public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
|
||||
this.connectionPoolSettings = connectionPoolSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ServerSettings}.
|
||||
*
|
||||
* @param serverSettings
|
||||
*/
|
||||
public void setServerSettings(ServerSettings serverSettings) {
|
||||
this.serverSettings = serverSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SslSettings}.
|
||||
*
|
||||
* @param sslSettings
|
||||
*/
|
||||
public void setSslSettings(SslSettings sslSettings) {
|
||||
this.sslSettings = sslSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() throws Exception {
|
||||
|
||||
return MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.credentialList(credentialList) //
|
||||
.streamFactoryFactory(streamFactoryFactory) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.clusterSettings(clusterSettings) //
|
||||
.socketSettings(socketSettings) //
|
||||
.heartbeatSocketSettings(heartbeatSocketSettings) //
|
||||
.connectionPoolSettings(connectionPoolSettings) //
|
||||
.serverSettings(serverSettings) //
|
||||
.sslSettings(sslSettings) //
|
||||
.build();
|
||||
}
|
||||
}
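The deprecated factory bean above ultimately just delegates to the `MongoClientSettings` builder. A hedged sketch of the equivalent programmatic configuration against the non-deprecated `com.mongodb.MongoClientSettings` (host, read/write concern, and read preference values are illustrative):

```java
// Hedged sketch: build MongoClientSettings directly instead of via the deprecated factory bean.
import java.util.Collections;

import com.mongodb.MongoClientSettings;
import com.mongodb.ReadConcern;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;

class MongoClientSettingsSketch {

    MongoClient createClient() {

        MongoClientSettings settings = MongoClientSettings.builder() //
                .applyToClusterSettings(it -> it.hosts(Collections.singletonList(new ServerAddress("localhost", 27017)))) //
                .readPreference(ReadPreference.primaryPreferred()) //
                .readConcern(ReadConcern.MAJORITY) //
                .writeConcern(WriteConcern.MAJORITY) //
                .build();

        return MongoClients.create(settings); // reactive streams driver client
    }
}
```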
|
||||
|
||||
@@ -29,7 +29,6 @@ import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -40,9 +39,7 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
@@ -66,7 +63,6 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mathieu Ouellet
|
||||
* @since 2.0
|
||||
* @see Flux
|
||||
* @see Mono
|
||||
@@ -224,9 +220,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction();
|
||||
|
||||
/**
|
||||
@@ -241,9 +235,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction(Publisher<ClientSession> sessionProvider);
|
||||
|
||||
/**
|
||||
@@ -299,7 +291,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param collectionName name of the collection.
|
||||
* @return an existing collection or one created on first server interaction.
|
||||
*/
|
||||
Mono<MongoCollection<Document>> getCollection(String collectionName);
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
@@ -627,52 +619,24 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* entity mapping information to determine the collection the query is ran against. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB
|
||||
* limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect
|
||||
* a particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -681,14 +645,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object as it was before the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -696,15 +657,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object as it was before the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -713,16 +671,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -731,17 +686,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
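The diff pairs above switch these signatures between `Update` and the more general `UpdateDefinition`. A hedged sketch showing both flavours against a template (the `Account` type, `"balance"` field, and values are illustrative only):

```java
// Hedged sketch: findAndModify with a plain Update and with an aggregation pipeline update.
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Update;

import reactor.core.publisher.Mono;

class FindAndModifySketch {

    static class Account {} // placeholder domain type

    Mono<Account> incrementWithUpdate(ReactiveMongoTemplate template) {
        return template.findAndModify(query(where("id").is("42")), new Update().inc("balance", 10), Account.class);
    }

    Mono<Account> setWithAggregationUpdate(ReactiveMongoTemplate template) {
        AggregationUpdate update = AggregationUpdate.update().set("balance").toValue(100); // UpdateDefinition flavour
        return template.findAndModify(query(where("id").is("42")), update, Account.class);
    }
}
```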
|
||||
|
||||
/**
|
||||
@@ -935,11 +887,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -951,11 +898,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -966,11 +909,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -988,8 +927,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1048,8 +987,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1096,8 +1035,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1133,8 +1072,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1164,111 +1103,99 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1276,34 +1203,27 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1311,16 +1231,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove the given object from the collection by id.
|
||||
|
||||
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff