Compare commits

500 commits: 2.1.16.RELEASE ... 3.0.0.RC1
[Commit listing of 500 entries omitted: only abbreviated commit SHAs were captured; the Author, Date, and message columns were empty in the capture.]
.travis.yml (27 changed lines)
@@ -1,8 +1,5 @@
language: java

jdk:
- oraclejdk8

before_install:
- mkdir -p downloads
- mkdir -p var/db var/log
@@ -14,16 +11,21 @@ before_install:
downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});"
sleep 15

jdk:
- openjdk13
- openjdk-ea

matrix:
allow_failures:
- jdk: openjdk-ea

env:
matrix:
- PROFILE=ci
- MONGO_VERSION=4.2.0
- MONGO_VERSION=4.0.14
- MONGO_VERSION=3.6.16
global:
- MONGO_VERSION=4.0.0

addons:
apt:
packages:
- oracle-java8-installer
- PROFILE=ci

sudo: false

@@ -32,4 +34,7 @@ cache:
- $HOME/.m2
- downloads

script: "mvn clean dependency:list test -P${PROFILE} -Dsort"
install: true

script:
- "./mvnw clean dependency:list test -Pjava11 -Dsort -U"
Jenkinsfile (vendored, 337 changed lines)
@@ -1,123 +1,240 @@
pipeline {
agent none
agent none

triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/2.1.x", threshold: hudson.model.Result.SUCCESS)
}
triggers {
pollSCM 'H/10 * * * *'
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
}

options {
disableConcurrentBuilds()
buildDiscarder(logRotator(numToKeepStr: '14'))
}
options {
disableConcurrentBuilds()
buildDiscarder(logRotator(numToKeepStr: '14'))
}

stages {
stage("Test") {
when {
anyOf {
branch '2.1.x'
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
stage("test: baseline") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
}
}
stages {
stage("Docker images") {
parallel {
stage('Publish JDK 8 + MongoDB 4.0') {
when {
changeset "ci/openjdk8-mongodb-4.0/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

}
}
steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
stage('Publish JDK 8 + MongoDB 4.2') {
when {
changeset "ci/openjdk8-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

stage('Release to artifactory') {
when {
branch 'issue/*'
not { triggeredBy 'UpstreamCause' }
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }
steps {
script {
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
stage('Publish JDK 13 + MongoDB 4.2') {
when {
changeset "ci/openjdk13-mongodb-4.2/**"
}
agent { label 'data' }
options { timeout(time: 30, unit: 'MINUTES') }

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}
steps {
script {
def image = docker.build("springci/spring-data-openjdk13-with-mongodb-4.2.0", "ci/openjdk13-mongodb-4.2/")
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
image.push()
}
}
}
}
}
}

steps {
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
stage("test: baseline (jdk8)") {
when {
anyOf {
branch 'master'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

stage('Release to artifactory with docs') {
when {
branch '2.1.x'
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }
stage("Test other configurations") {
when {
anyOf {
branch 'master'
not { triggeredBy 'UpstreamCause' }
}
}
parallel {
stage("test: mongodb 4.0 (jdk8)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}
stage("test: mongodb 4.2 (jdk8)") {
agent {
docker {
image 'springci/spring-data-openjdk8-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}

steps {
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
}
stage("test: baseline (jdk13)") {
agent {
docker {
image 'springci/spring-data-openjdk13-with-mongodb-4.2.0:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 30, unit: 'MINUTES') }
steps {
sh 'rm -rf ?'
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
sh 'sleep 10'
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
sh 'sleep 15'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
}
}
}
}

post {
changed {
script {
slackSend(
color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
channel: '#spring-data-dev',
message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
emailext(
subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
mimeType: 'text/html',
recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
}
}
}
stage('Release to artifactory') {
when {
anyOf {
branch 'master'
not { triggeredBy 'UpstreamCause' }
}
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}

steps {
sh 'rm -rf ?'
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.staging-repository=libs-snapshot-local " +
"-Dartifactory.build-name=spring-data-mongodb " +
"-Dartifactory.build-number=${BUILD_NUMBER} " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}

stage('Publish documentation') {
when {
branch 'master'
}
agent {
docker {
image 'adoptopenjdk/openjdk8:latest'
label 'data'
args '-v $HOME:/tmp/jenkins-home'
}
}
options { timeout(time: 20, unit: 'MINUTES') }

environment {
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
}

steps {
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
'-Dartifactory.server=https://repo.spring.io ' +
"-Dartifactory.username=${ARTIFACTORY_USR} " +
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
"-Dartifactory.distribution-repository=temp-private-local " +
'-Dmaven.test.skip=true clean deploy -U -B'
}
}
}

post {
changed {
script {
slackSend(
color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
channel: '#spring-data-dev',
message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
emailext(
subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
mimeType: 'text/html',
recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
}
}
}
}
README.adoc (141 changed lines)
@@ -50,11 +50,11 @@ public class MyService {

@Configuration
@EnableMongoRepositories
class ApplicationConfig extends AbstractMongoConfiguration {
class ApplicationConfig extends AbstractMongoClientConfiguration {

  @Override
  public MongoClient mongoClient() {
    return new MongoClient();
    return MongoClients.create();
  }

  @Override
@@ -94,6 +94,143 @@ If you'd rather like the latest snapshots of the upcoming major version, use our
</repository>
----

== Upgrading from 2.x

The 4.0 MongoDB Java Driver no longer supports certain features that had already been deprecated in one of the last minor versions.
Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

=== XML Namespace

.Changed XML Namespace Elements and Attributes:
|===
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
| Now exposes a `com.mongodb.client.MongoClient`

| `<mongo:mongo-client replica-set="..." />`
| Was a comma delimited list of replica set members (host/port)
| Now defines the replica set name. +
Use `<mongo:client-settings cluster-hosts="..." />` instead

| `<mongo:db-factory writeConcern="..." />`
| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
| W1, W2, W3, UNACKNOWLEDGED, ACKNOWLEDGED, JOURNALED, MAJORITY
|===

.Removed XML Namespace Elements and Attributes:
|===
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
| Referencing a `com.mongodb.client.MongoClient`.

| `<mongo:mongo-client credentials="..." />`
| `<mongo:mongo-client credential="..." />`
| Single authentication data instead of list.

| `<mongo:client-options />`
| `<mongo:client-settings />`
| See `com.mongodb.MongoClientSettings` for details.
|===

.New XML Namespace Elements and Attributes:
|===
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`

| `<mongo:db-factory connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:mongo-client connection-string="..." />`
| Replacement for `uri` and `client-uri`.

| `<mongo:client-settings />`
| Namespace element for `com.mongodb.MongoClientSettings`.

|===
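
The XML attributes above have programmatic counterparts on the driver's `MongoClientSettings` builder. The sketch below is an illustration added for this comparison, not code from the change set; the host value is a placeholder and the API assumed is the MongoDB 4.x Java driver (`com.mongodb.MongoClientSettings`, `com.mongodb.client.MongoClients`).

[source,java]
----
import java.util.Collections;

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class ClientSettingsSketch {

    MongoClient client() {

        MongoClientSettings settings = MongoClientSettings.builder()
                // rough Java counterpart of <mongo:client-settings cluster-hosts="..." />; the host is a placeholder.
                // A connection string can be applied instead via applyConnectionString(...).
                .applyToClusterSettings(cluster -> cluster.hosts(
                        Collections.singletonList(new ServerAddress("localhost", 27017))))
                .build();

        return MongoClients.create(settings);
    }
}
----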

=== Java Configuration

.Java API changes
|===
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
Uses `MongoClientSettings` instead of `MongoClientOptions`.

| `MongoDataIntegrityViolationException`
| Uses `WriteConcernResult` instead of `WriteResult`.

| `BulkOperationException`
| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`

| `ReactiveMongoClientFactoryBean`
| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`

| `ReactiveMongoClientSettingsFactoryBean`
| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
|===

.Removed Java API:
|===
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
| Creating a `com.mongodb.MongoClientSettings`.

| `AbstractMongoConfiguration`
| `AbstractMongoClientConfiguration` +
(Available since 2.1)
| Using `com.mongodb.client.MongoClient`.

| `MongoDbFactory#getLegacyDb()`
| -
| -

| `SimpleMongoDbFactory`
| `SimpleMongoClientDbFactory` +
(Available since 2.1)
|

| `MapReduceOptions#getOutputType()`
| `MapReduceOptions#getMapReduceAction()`
| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.

| `Meta\|Query` maxScan & snapshot
|
|
|===
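
As a worked illustration of the `SimpleMongoDbFactory` row above, a migration could look like the following sketch. This is not code from this change set: the database name `test` is a placeholder, and it assumes the `SimpleMongoClientDbFactory(com.mongodb.client.MongoClient, String)` constructor noted in the table as available since 2.1.

[source,java]
----
import com.mongodb.client.MongoClients;

import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;

class FactoryMigrationSketch {

    MongoTemplate template() {

        // 2.x (removed): new SimpleMongoDbFactory(new com.mongodb.MongoClient(), "test")
        // 3.x: build the factory from a com.mongodb.client.MongoClient instead
        MongoDbFactory factory = new SimpleMongoClientDbFactory(MongoClients.create(), "test");

        return new MongoTemplate(factory);
    }
}
----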

=== Other Changes

==== UUID Types

The MongoDB UUID representation can now be configured with different formats.
This has to be done via `MongoClientSettings` as shown in the snippet below.

.UUID Codec Configuration
====
[source,java]
----
static class Config extends AbstractMongoClientConfiguration {

    @Override
    public void configureClientSettings(MongoClientSettings.Builder builder) {
        builder.uuidRepresentation(UuidRepresentation.STANDARD);
    }

    // ...
}
----
====
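
The same representation can also be set when a client is built by hand rather than through `AbstractMongoClientConfiguration`. A minimal sketch, assuming the 4.x driver's `MongoClientSettings.Builder#uuidRepresentation` and `MongoClients.create(MongoClientSettings)`:

[source,java]
----
import org.bson.UuidRepresentation;

import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class UuidRepresentationSketch {

    MongoClient client() {

        MongoClientSettings settings = MongoClientSettings.builder()
                .uuidRepresentation(UuidRepresentation.STANDARD) // same setting as in the configuration class above
                .build();

        return MongoClients.create(settings);
    }
}
----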

== Getting Help

Having trouble with Spring Data? We’d love to help!
SECURITY.adoc (new file, 9 lines)
@@ -0,0 +1,9 @@
# Security Policy

## Supported Versions

Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.

## Reporting a Vulnerability

Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.
ci/README.adoc (new file, 39 lines)
@@ -0,0 +1,39 @@
== Running CI tasks locally

Since Concourse is built on top of Docker, it's easy to:

* Debug what went wrong on your local machine.
* Test out a tweak to your `test.sh` script before sending it out.
* Experiment against a new image before submitting your pull request.

All of these use cases are great reasons to essentially run what Concourse does on your local machine.

IMPORTANT: To do this you must have Docker installed on your machine.

1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
+
Next, run the `test.sh` script from inside the container:
+
2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`

Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.

If you need to test the `build.sh` script, do this:

1. `mkdir /tmp/spring-data-mongodb-artifactory`
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
artifactory output directory at `spring-data-mongodb-artifactory`.
+
Next, run the `build.sh` script from inside the container:
+
3. `spring-data-mongodb-github/ci/build.sh`

IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
and deliver them to artifactory.

NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
ci/openjdk11-mongodb-4.2/Dockerfile (new file, 15 lines)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
ci/openjdk13-mongodb-4.2/Dockerfile (new file, 15 lines)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk13:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
@@ -1,14 +1,15 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4
RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
echo ${TZ} > /etc/timezone;

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list

RUN apt-get update

RUN apt-get install -y mongodb-org=4.0.9 mongodb-org-server=4.0.9 mongodb-org-shell=4.0.9 mongodb-org-mongos=4.0.9 mongodb-org-tools=4.0.9

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update ; \
apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
@@ -1,14 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

RUN apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2

RUN apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 4B7C549A058F8B6B

RUN echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.1 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.1.list

RUN apt-get update

RUN apt-get install -y mongodb-org-unstable=4.1.13 mongodb-org-unstable-server=4.1.13 mongodb-org-unstable-shell=4.1.13 mongodb-org-unstable-mongos=4.1.13 mongodb-org-unstable-tools=4.1.13

RUN apt-get clean \
&& rm -rf /var/lib/apt/lists/*
ci/openjdk8-mongodb-4.2/Dockerfile (new file, 15 lines)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
apt-get clean; \
rm -rf /var/lib/apt/lists/*;
pom.xml (47 changed lines)
@@ -5,7 +5,7 @@
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.16.RELEASE</version>
<version>3.0.0.RC1</version>
<packaging>pom</packaging>

<name>Spring Data MongoDB</name>
@@ -15,21 +15,20 @@
<parent>
<groupId>org.springframework.data.build</groupId>
<artifactId>spring-data-parent</artifactId>
<version>2.1.16.RELEASE</version>
<version>2.3.0.RC1</version>
</parent>

<modules>
<module>spring-data-mongodb</module>
<module>spring-data-mongodb-cross-store</module>
<module>spring-data-mongodb-distribution</module>
</modules>

<properties>
<project.type>multi</project.type>
<dist.id>spring-data-mongodb</dist.id>
<springdata.commons>2.1.16.RELEASE</springdata.commons>
<mongo>3.8.2</mongo>
<mongo.reactivestreams>1.9.2</mongo.reactivestreams>
<springdata.commons>2.3.0.RC1</springdata.commons>
<mongo>4.0.1</mongo>
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
<jmh.version>1.19</jmh.version>
</properties>

@@ -118,45 +117,35 @@
<id>benchmarks</id>
<modules>
<module>spring-data-mongodb</module>
<module>spring-data-mongodb-cross-store</module>
<module>spring-data-mongodb-distribution</module>
<module>spring-data-mongodb-benchmarks</module>
</modules>
</profile>

<profile>
<id>distribute</id>
<build>
<plugins>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
<configuration>
<attributes>
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
<reactor>${reactor}</reactor>
</attributes>
</configuration>
</plugin>
</plugins>
</build>
</profile>

</profiles>

<dependencies>
<!-- MongoDB -->
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<artifactId>mongodb-driver-core</artifactId>
<version>${mongo}</version>
</dependency>
</dependencies>

<repositories>
<repository>
<id>spring-libs-release</id>
<url>https://repo.spring.io/libs-release</url>
<id>spring-libs-milestone</id>
<url>https://repo.spring.io/libs-milestone</url>
</repository>
<repository>
<id>sonatype-libs-snapshot</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
<releases>
<enabled>false</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
@@ -7,7 +7,7 @@
<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.16.RELEASE</version>
<version>3.0.0.RC1</version>
<relativePath>../pom.xml</relativePath>
</parent>

@@ -87,6 +87,7 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<useSystemClassLoader>false</useSystemClassLoader>
<testSourceDirectory>${project.build.sourceDirectory}</testSourceDirectory>
<testClassesDirectory>${project.build.outputDirectory}</testClassesDirectory>
<excludes>
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<aspectj>
<aspects>
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
</aspects>
</aspectj>
@@ -1,148 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">

<modelVersion>4.0.0</modelVersion>

<parent>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb-parent</artifactId>
<version>2.1.16.RELEASE</version>
<relativePath>../pom.xml</relativePath>
</parent>

<artifactId>spring-data-mongodb-cross-store</artifactId>
<name>Spring Data MongoDB - Cross-Store Support</name>

<properties>
<jpa>2.1.1</jpa>
<hibernate>5.2.1.Final</hibernate>
<java-module-name>spring.data.mongodb.cross.store</java-module-name>
<project.root>${basedir}/..</project.root>
</properties>

<dependencies>

<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>

<!-- Spring Data -->
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>2.1.16.RELEASE</version>
</dependency>

<!-- reactive -->
<dependency>
<groupId>io.projectreactor</groupId>
<artifactId>reactor-core</artifactId>
<optional>true</optional>
</dependency>

<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>

<!-- JPA -->
<dependency>
<groupId>org.eclipse.persistence</groupId>
<artifactId>javax.persistence</artifactId>
<version>${jpa}</version>
<optional>true</optional>
</dependency>

<!-- For Tests -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>${hibernate}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>1.8.0.10</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${validation}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.2.4.Final</version>
<scope>test</scope>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>aspectj-maven-plugin</artifactId>
<version>1.6</version>
<dependencies>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>${aspectj}</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjtools</artifactId>
<version>${aspectj}</version>
</dependency>
</dependencies>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>test-compile</goal>
</goals>
</execution>
</executions>
<configuration>
<outxml>true</outxml>
<aspectLibraries>
<aspectLibrary>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
</aspectLibrary>
</aspectLibraries>
<complianceLevel>${source.level}</complianceLevel>
<source>${source.level}</source>
<target>${source.level}</target>
<xmlConfigured>aop.xml</xmlConfigured>
</configuration>
</plugin>
</plugins>
</build>

</project>
@@ -1,214 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (cs == null) {
|
||||
log.debug("Flush: changeset was null, nothing to flush.");
|
||||
return 0L;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: changeset: {}", cs.getValues());
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entity.getClass());
|
||||
if (mongoTemplate.getCollection(collName) == null) {
|
||||
mongoTemplate.createCollection(collName);
|
||||
}
|
||||
|
||||
for (String key : cs.getValues().keySet()) {
|
||||
if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
|
||||
Object value = cs.getValues().get(key);
|
||||
final Document dbQuery = new Document();
|
||||
dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
|
||||
dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
|
||||
dbQuery.put(ENTITY_FIELD_NAME, key);
|
||||
final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
|
||||
public Document doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
Document id = collection.find(dbQuery).first();
|
||||
return id;
|
||||
}
|
||||
});
|
||||
|
||||
if (value == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: removing: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
DeleteResult dr = collection.deleteMany(dbQuery);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
final Document dbDoc = new Document();
|
||||
dbDoc.putAll(dbQuery);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Flush: saving: {}", dbQuery);
|
||||
}
|
||||
mongoTemplate.getConverter().write(value, dbDoc);
|
||||
dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
|
||||
if (dbId != null) {
|
||||
dbDoc.put("_id", dbId.get("_id"));
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection)
|
||||
throws MongoException, DataAccessException {
|
||||
|
||||
if (dbId != null) {
|
||||
collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
|
||||
} else {
|
||||
|
||||
if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
|
||||
dbDoc.remove("_id");
|
||||
}
|
||||
collection.insertOne(dbDoc);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the collection the given entity type shall be persisted to.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
|
||||
return mongoTemplate.getCollectionName(entityClass);
|
||||
}
|
||||
}
|
||||
@@ -1,272 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Transient;
|
||||
import javax.persistence.Entity;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
import org.aspectj.lang.reflect.FieldSignature;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
import org.springframework.data.mongodb.crossstore.DocumentBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException;
|
||||
import org.springframework.data.crossstore.HashMapChangeSet;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
/**
|
||||
* Aspect to turn an object annotated with @Document into a persistent document using Mongo.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public aspect MongoDocumentBacking {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class);
|
||||
|
||||
// Aspect shared config
|
||||
private ChangeSetPersister<Object> changeSetPersister;
|
||||
|
||||
public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
|
||||
this.changeSetPersister = changeSetPersister;
|
||||
}
|
||||
|
||||
// ITD to introduce N state to Annotated objects
|
||||
declare parents : (@Entity *) implements DocumentBacked;
|
||||
|
||||
// The annotated fields that will be persisted in MongoDB rather than with JPA
|
||||
declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Advise user-defined constructors of ChangeSetBacked objects to create a new
|
||||
// backing ChangeSet
|
||||
// -------------------------------------------------------------------------
|
||||
pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
|
||||
execution((DocumentBacked+).new(..)) &&
|
||||
!execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity);
|
||||
|
||||
pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
|
||||
execution((DocumentBacked+).new(ChangeSet)) &&
|
||||
this(entity) &&
|
||||
args(cs);
|
||||
|
||||
protected pointcut entityFieldGet(DocumentBacked entity) :
|
||||
get(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
!get(* DocumentBacked.*);
|
||||
|
||||
protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
|
||||
set(@RelatedDocument * DocumentBacked+.*) &&
|
||||
this(entity) &&
|
||||
args(newVal) &&
|
||||
!set(* DocumentBacked.*);
|
||||
|
||||
// intercept EntityManager.merge calls
|
||||
public pointcut entityManagerMerge(EntityManager em, Object entity) :
|
||||
call(* EntityManager.merge(Object)) &&
|
||||
target(em) &&
|
||||
args(entity);
|
||||
|
||||
// intercept EntityManager.remove calls
|
||||
// public pointcut entityManagerRemove(EntityManager em, Object entity) :
|
||||
// call(* EntityManager.remove(Object)) &&
|
||||
// target(em) &&
|
||||
// args(entity);
|
||||
|
||||
// move changeSet from detached entity to the newly merged persistent object
|
||||
Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
|
||||
Object mergedEntity = proceed(em, entity);
|
||||
if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
|
||||
((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
|
||||
}
|
||||
return mergedEntity;
|
||||
}
|
||||
|
||||
// clear changeSet from removed entity
|
||||
// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
|
||||
// if (entity instanceof DocumentBacked) {
|
||||
// removeChangeSetValues((DocumentBacked)entity);
|
||||
// }
|
||||
// return proceed(em, entity);
|
||||
// }
|
||||
|
||||
private static void removeChangeSetValues(DocumentBacked entity) {
|
||||
LOGGER.debug("Removing all change-set values for " + entity);
|
||||
ChangeSet nulledCs = new HashMapChangeSet();
|
||||
DocumentBacked documentEntity = (DocumentBacked) entity;
|
||||
@SuppressWarnings("unchecked")
|
||||
ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
|
||||
try {
|
||||
changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
|
||||
documentEntity.getChangeSet());
|
||||
} catch (DataAccessException e) {
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
for (String key : entity.getChangeSet().getValues().keySet()) {
|
||||
nulledCs.set(key, null);
|
||||
}
|
||||
entity.setChangeSet(nulledCs);
|
||||
}
|
||||
|
||||
before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
|
||||
LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
|
||||
// Populate all ITD fields
|
||||
entity.setChangeSet(new HashMapChangeSet());
|
||||
entity.itdChangeSetPersister = changeSetPersister;
|
||||
entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
|
||||
// registerTransactionSynchronization(entity);
|
||||
}
|
||||
|
||||
private static void registerTransactionSynchronization(DocumentBacked entity) {
|
||||
if (TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Adding transaction synchronization for " + entity);
|
||||
}
|
||||
TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization already active for " + entity);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Transaction synchronization is not active for " + entity);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// ChangeSet-related mixins
|
||||
// -------------------------------------------------------------------------
|
||||
// Introduced field
|
||||
@Transient
|
||||
private ChangeSet DocumentBacked.changeSet;
|
||||
|
||||
@Transient
|
||||
private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;
|
||||
|
||||
@Transient
|
||||
private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;
|
||||
|
||||
public void DocumentBacked.setChangeSet(ChangeSet cs) {
|
||||
this.changeSet = cs;
|
||||
}
|
||||
|
||||
public ChangeSet DocumentBacked.getChangeSet() {
|
||||
return changeSet;
|
||||
}
|
||||
|
||||
// Flush the entity state to the persistent store
|
||||
public void DocumentBacked.flush() {
|
||||
Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
itdChangeSetPersister.persistState(this, this.changeSet);
|
||||
}
|
||||
|
||||
public Object DocumentBacked.get_persistent_id() {
|
||||
return itdChangeSetPersister.getPersistentId(this, this.changeSet);
|
||||
}
|
||||
|
||||
// lifecycle methods
|
||||
@javax.persistence.PostPersist
|
||||
public void DocumentBacked.itdPostPersist() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PreUpdate
|
||||
public void DocumentBacked.itdPreUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostUpdate
|
||||
public void DocumentBacked.itdPostUpdate() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostRemove
|
||||
public void DocumentBacked.itdPostRemove() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
removeChangeSetValues(this);
|
||||
}
|
||||
|
||||
@javax.persistence.PostLoad
|
||||
public void DocumentBacked.itdPostLoad() {
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
|
||||
}
|
||||
registerTransactionSynchronization(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field reads to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity): entityFieldGet(entity) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
|
||||
if (entity.getChangeSet().getValues().get(propName) == null) {
|
||||
try {
|
||||
this.changeSetPersister
|
||||
.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
|
||||
} catch (NotFoundException e) {
|
||||
}
|
||||
}
|
||||
Object fValue = entity.getChangeSet().getValues().get(propName);
|
||||
if (fValue != null) {
|
||||
return fValue;
|
||||
}
|
||||
return proceed(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* delegates field writes to the state accessors instance
|
||||
*/
|
||||
Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
|
||||
Field f = field(thisJoinPoint);
|
||||
String propName = f.getName();
|
||||
LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
|
||||
entity.getChangeSet().set(propName, newVal);
|
||||
return proceed(entity, newVal);
|
||||
}
|
||||
|
||||
Field field(JoinPoint joinPoint) {
|
||||
FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
|
||||
return fieldSignature.getField();
|
||||
}
|
||||
}
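For reference, a minimal sketch of how the removed cross-store support was driven from application code; it reuses the Person and Resume fixtures that appear further down in this diff, and the surrounding EntityManager setup is assumed. The aspect weaves DocumentBacked into the @Entity, so assigning a @RelatedDocument field routes the value into the entity's ChangeSet and MongoDB while JPA persists the remaining state.

import javax.persistence.EntityManager;

import org.springframework.data.mongodb.crossstore.test.Person;
import org.springframework.data.mongodb.crossstore.test.Resume;

public class CrossStoreUsageSketch {

	public void save(EntityManager entityManager) {

		Person person = new Person("Thomas", 20); // constructor advice attaches a fresh ChangeSet
		person.setId(1L);

		Resume resume = new Resume();
		resume.addJob("DiMark, DBA, 1990-2000");

		person.setResume(resume); // field-set advice records the Resume in the ChangeSet

		// JPA stores the relational state; the ChangeSetPersister flushes the
		// @RelatedDocument values to MongoDB when the transaction completes.
		entityManager.persist(person);
	}
}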
@@ -1,5 +0,0 @@
/**
 * Infrastructure for Spring Data's MongoDB cross store support.
 */
package org.springframework.data.mongodb.crossstore;
@@ -1,195 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.crossstore.test.Address;
|
||||
import org.springframework.data.mongodb.crossstore.test.Person;
|
||||
import org.springframework.data.mongodb.crossstore.test.Resume;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
/**
|
||||
* Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}).
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml")
|
||||
public class CrossStoreMongoTests {
|
||||
|
||||
@Autowired MongoTemplate mongoTemplate;
|
||||
|
||||
@PersistenceContext EntityManager entityManager;
|
||||
|
||||
@Autowired PlatformTransactionManager transactionManager;
|
||||
TransactionTemplate txTemplate;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
txTemplate = new TransactionTemplate(transactionManager);
|
||||
|
||||
clearData(Person.class);
|
||||
|
||||
Address address = new Address(12, "MAin St.", "Boston", "MA", "02101");
|
||||
|
||||
Resume resume = new Resume();
|
||||
resume.addEducation("Skanstulls High School, 1975");
|
||||
resume.addEducation("Univ. of Stockholm, 1980");
|
||||
resume.addJob("DiMark, DBA, 1990-2000");
|
||||
resume.addJob("VMware, Developer, 2007-");
|
||||
|
||||
final Person person = new Person("Thomas", 20);
|
||||
person.setAddress(address);
|
||||
person.setResume(resume);
|
||||
person.setId(1L);
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.persist(person);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.remove(entityManager.find(Person.class, 1L));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void clearData(Class<?> domainType) {
|
||||
|
||||
String collectionName = mongoTemplate.getCollectionName(domainType);
|
||||
mongoTemplate.dropCollection(collectionName);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testReadJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testUpdatedJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
found.setAge(44);
|
||||
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
|
||||
|
||||
entityManager.merge(found);
|
||||
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
|
||||
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMergeJpaEntityWithMongoDocument() {
|
||||
|
||||
final Person detached = entityManager.find(Person.class, 1L);
|
||||
entityManager.detach(detached);
|
||||
detached.getResume().addJob("TargetRx, Developer, 2000-2005");
|
||||
|
||||
Person merged = txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person result = entityManager.merge(detached);
|
||||
entityManager.flush();
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
final Person updated = entityManager.find(Person.class, 1L);
|
||||
Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveJpaEntityWithMongoDocument() {
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person p2 = new Person("Thomas", 20);
|
||||
Resume r2 = new Resume();
|
||||
r2.addEducation("Skanstulls High School, 1975");
|
||||
r2.addJob("DiMark, DBA, 1990-2000");
|
||||
p2.setResume(r2);
|
||||
p2.setId(2L);
|
||||
entityManager.persist(p2);
|
||||
Person p3 = new Person("Thomas", 20);
|
||||
Resume r3 = new Resume();
|
||||
r3.addEducation("Univ. of Stockholm, 1980");
|
||||
r3.addJob("VMware, Developer, 2007-");
|
||||
p3.setResume(r3);
|
||||
p3.setId(3L);
|
||||
entityManager.persist(p3);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
final Person found2 = entityManager.find(Person.class, 2L);
|
||||
entityManager.remove(found2);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
boolean weFound3 = false;
|
||||
|
||||
for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) {
|
||||
Assert.assertTrue(!dbo.get("_entity_id").equals(2L));
|
||||
if (dbo.get("_entity_id").equals(3L)) {
|
||||
weFound3 = true;
|
||||
}
|
||||
}
|
||||
Assert.assertTrue(weFound3);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
public class Address {
|
||||
|
||||
private Integer streetNumber;
|
||||
private String streetName;
|
||||
private String city;
|
||||
private String state;
|
||||
private String zip;
|
||||
|
||||
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
|
||||
super();
|
||||
this.streetNumber = streetNumber;
|
||||
this.streetName = streetName;
|
||||
this.city = city;
|
||||
this.state = state;
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
public Integer getStreetNumber() {
|
||||
return streetNumber;
|
||||
}
|
||||
|
||||
public void setStreetNumber(Integer streetNumber) {
|
||||
this.streetNumber = streetNumber;
|
||||
}
|
||||
|
||||
public String getStreetName() {
|
||||
return streetName;
|
||||
}
|
||||
|
||||
public void setStreetName(String streetName) {
|
||||
this.streetName = streetName;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
public void setState(String state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public String getZip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
public void setZip(String zip) {
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
|
||||
private String name;
|
||||
|
||||
private int age;
|
||||
|
||||
private java.util.Date birthDate;
|
||||
|
||||
@RelatedDocument
|
||||
private Address address;
|
||||
|
||||
@RelatedDocument
|
||||
private Resume resume;
|
||||
|
||||
public Person() {
|
||||
}
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
this.birthDate = new java.util.Date();
|
||||
}
|
||||
|
||||
public void birthday() {
|
||||
++age;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
public void setAge(int age) {
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public java.util.Date getBirthDate() {
|
||||
return birthDate;
|
||||
}
|
||||
|
||||
public void setBirthDate(java.util.Date birthDate) {
|
||||
this.birthDate = birthDate;
|
||||
}
|
||||
|
||||
public Resume getResume() {
|
||||
return resume;
|
||||
}
|
||||
|
||||
public void setResume(Resume resume) {
|
||||
this.resume = resume;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document
|
||||
public class Resume {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(Resume.class);
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
|
||||
private String education = "";
|
||||
|
||||
private String jobs = "";
|
||||
|
||||
public String getId() {
|
||||
return id.toString();
|
||||
}
|
||||
|
||||
public String getEducation() {
|
||||
return education;
|
||||
}
|
||||
|
||||
public void addEducation(String education) {
|
||||
LOGGER.debug("Adding education " + education);
|
||||
this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
|
||||
}
|
||||
|
||||
public String getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public void addJob(String job) {
|
||||
LOGGER.debug("Adding job " + job);
|
||||
this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Resume [education=" + education + ", jobs=" + jobs + "]";
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
version="2.0"
|
||||
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
|
||||
<persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
|
||||
<provider>org.hibernate.ejb.HibernatePersistence</provider>
|
||||
<class>org.springframework.data.mongodb.crossstore.test.Person</class>
|
||||
<properties>
|
||||
<property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
<!--value='create' to build a new database on each run; value='update' to modify an existing database; value='create-drop' means the same as 'create' but also drops tables when Hibernate closes; value='validate' makes no changes to the database-->
|
||||
<property name="hibernate.hbm2ddl.auto" value="update"/>
|
||||
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
|
||||
</properties>
|
||||
</persistence-unit>
|
||||
</persistence>
|
||||
@@ -1,72 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:tx="http://www.springframework.org/schema/tx"
|
||||
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/jdbc https://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
|
||||
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx-3.0.xsd
|
||||
http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd">
|
||||
|
||||
<context:spring-configured/>
|
||||
|
||||
<context:component-scan base-package="org.springframework.persistence.mongodb.test">
|
||||
<context:exclude-filter expression="org.springframework.stereotype.Controller" type="annotation"/>
|
||||
</context:component-scan>
|
||||
|
||||
<mongo:mapping-converter/>
|
||||
|
||||
<!-- Mongo config -->
|
||||
<bean id="mongoClient" class="org.springframework.data.mongodb.core.MongoClientFactoryBean">
|
||||
<property name="host" value="localhost"/>
|
||||
<property name="port" value="27017"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoDbFactory" class="org.springframework.data.mongodb.core.SimpleMongoDbFactory">
|
||||
<constructor-arg name="mongoClient" ref="mongoClient"/>
|
||||
<constructor-arg name="databaseName" value="database"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
|
||||
<constructor-arg name="mongoConverter" ref="mappingConverter"/>
|
||||
</bean>
|
||||
|
||||
<bean class="org.springframework.data.mongodb.core.MongoExceptionTranslator"/>
|
||||
|
||||
<!-- Mongo aspect config -->
|
||||
<bean class="org.springframework.data.mongodb.crossstore.MongoDocumentBacking"
|
||||
factory-method="aspectOf">
|
||||
<property name="changeSetPersister" ref="mongoChangeSetPersister"/>
|
||||
</bean>
|
||||
<bean id="mongoChangeSetPersister"
|
||||
class="org.springframework.data.mongodb.crossstore.MongoChangeSetPersister">
|
||||
<property name="mongoTemplate" ref="mongoTemplate"/>
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<jdbc:embedded-database id="dataSource" type="HSQL">
|
||||
</jdbc:embedded-database>
|
||||
|
||||
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>
|
||||
|
||||
<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
|
||||
<property name="persistenceUnitName" value="test"/>
|
||||
<property name="dataSource" ref="dataSource"/>
|
||||
<property name="jpaVendorAdapter">
|
||||
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
|
||||
<property name="showSql" value="true"/>
|
||||
<property name="generateDdl" value="true"/>
|
||||
<property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
@@ -1,18 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
|
||||
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d %5p %40.40c:%4L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<!--
|
||||
<logger name="org.springframework" level="debug" />
|
||||
-->
|
||||
|
||||
<root level="error">
|
||||
<appender-ref ref="console" />
|
||||
</root>
|
||||
|
||||
</configuration>
|
||||
@@ -1,5 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -13,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.1.16.RELEASE</version>
|
||||
<version>3.0.0.RC1</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -31,8 +32,15 @@
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.1.16.RELEASE</version>
|
||||
<version>3.0.0.RC1</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -65,6 +65,12 @@
|
||||
<artifactId>querydsl-mongodb</artifactId>
|
||||
<version>${querydsl}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongo-java-driver</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -82,32 +88,23 @@
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-sync</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
<version>${mongo.reactivestreams}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-core</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>bson</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
@@ -119,14 +116,14 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava-reactive-streams</artifactId>
|
||||
<version>${rxjava-reactive-streams}</version>
|
||||
<optional>true</optional>
|
||||
@@ -253,6 +250,13 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.junit-pioneer</groupId>
|
||||
<artifactId>junit-pioneer</artifactId>
|
||||
<version>0.5.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
@@ -264,20 +268,25 @@
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<scope>test</scope>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-reactor</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -322,6 +331,7 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<useSystemClassLoader>false</useSystemClassLoader>
|
||||
<useFile>false</useFile>
|
||||
<includes>
|
||||
<include>**/*Tests.java</include>
|
||||
@@ -334,12 +344,6 @@
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
<properties>
|
||||
<property>
|
||||
<name>listener</name>
|
||||
<value>org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener</value>
|
||||
</property>
|
||||
</properties>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
||||
|
||||
@@ -19,9 +19,9 @@ import java.util.List;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
|
||||
import com.mongodb.BulkWriteError;
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.BulkWriteResult;
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.bulk.BulkWriteError;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
|
||||
/**
|
||||
* Is thrown when errors occur during bulk operations.
|
||||
@@ -38,12 +38,12 @@ public class BulkOperationException extends DataAccessException {
|
||||
private final BulkWriteResult result;
|
||||
|
||||
/**
|
||||
* Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}.
|
||||
* Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}.
|
||||
*
|
||||
* @param message must not be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
*/
|
||||
public BulkOperationException(String message, BulkWriteException source) {
|
||||
public BulkOperationException(String message, MongoBulkWriteException source) {
|
||||
|
||||
super(message, source);
|
||||
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
/*
|
||||
* Copyright 2010-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Exception being thrown in case we cannot connect to a MongoDB instance.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {
|
||||
|
||||
private final UserCredentials credentials;
|
||||
private final @Nullable String database;
|
||||
|
||||
private static final long serialVersionUID = 1172099106475265589L;
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
|
||||
super(msg, cause);
|
||||
this.database = null;
|
||||
this.credentials = UserCredentials.NO_CREDENTIALS;
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg) {
|
||||
this(msg, null, UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) {
|
||||
super(msg);
|
||||
this.database = database;
|
||||
this.credentials = credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public UserCredentials getCredentials() {
|
||||
return this.credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the database trying to be accessed.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabase() {
|
||||
return database;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,112 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Interface for factories creating {@link MongoDatabase} instances.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} from the underlying factory.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase() throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName
|
||||
* @return never {@literal null}.
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getMongoDatabase(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getMongoDatabase().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDatabaseFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase}
|
||||
* instances that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
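A minimal usage sketch of the new MongoDatabaseFactory contract introduced above; the factory argument and the "reporting" database name are assumed for illustration.

import org.springframework.data.mongodb.MongoDatabaseFactory;

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.MongoDatabase;

public class MongoDatabaseFactoryUsageSketch {

	public void useFactory(MongoDatabaseFactory factory) {

		MongoDatabase defaultDatabase = factory.getMongoDatabase();      // configured default database
		MongoDatabase reporting = factory.getMongoDatabase("reporting"); // database by name

		// Factory bound to a new, causally consistent session; MongoDatabase instances
		// obtained from it are bound to that session.
		MongoDatabaseFactory sessionBound = factory
				.withSession(ClientSessionOptions.builder().causallyConsistent(true).build());

		boolean transactionActive = sessionBound.isTransactionActive();
	}
}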
@@ -27,7 +27,7 @@ import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <p />
|
||||
@@ -41,93 +41,94 @@ import com.mongodb.client.MongoDatabase;
|
||||
public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory} using
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory) {
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory}.
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory} using
|
||||
* Obtain the {@link MongoDatabase database} with the given name from the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) {
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory}.
|
||||
* Obtain the {@link MongoDatabase database} with the given name from the given {@link MongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from.
|
||||
* @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory,
|
||||
public static MongoDatabase getDatabase(String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory,
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
|
||||
if (!TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
ClientSession session = doGetSession(factory, sessionSynchronization);
|
||||
|
||||
if (session == null) {
|
||||
return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb();
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
MongoDbFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb();
|
||||
MongoDatabaseFactory factoryToUse = factory.withSession(session);
|
||||
return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase();
|
||||
}
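A hedged sketch of how application code is expected to resolve the database through MongoDatabaseUtils so that an ongoing ClientSession-backed transaction is honoured; the repository class, the "orders" collection and the surrounding transaction manager configuration are assumed.

import org.bson.Document;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.transaction.annotation.Transactional;

import com.mongodb.client.MongoDatabase;

public class OrderRepositorySketch {

	private final MongoDatabaseFactory factory;

	public OrderRepositorySketch(MongoDatabaseFactory factory) {
		this.factory = factory;
	}

	@Transactional
	public void insertOrder(Document order) {

		// Participates in a transaction started by MongoTransactionManager instead of
		// calling factory.getMongoDatabase() directly.
		MongoDatabase database = MongoDatabaseUtils.getDatabase(factory,
				SessionSynchronization.ON_ACTUAL_TRANSACTION);

		database.getCollection("orders").insertOne(order);
	}
}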
|
||||
|
||||
/**
|
||||
* Check if the {@link MongoDbFactory} is actually bound to a {@link ClientSession} that has an active transaction, or
|
||||
* if a {@link TransactionSynchronization} has been registered for the {@link MongoDbFactory resource} and if the
|
||||
* associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
* Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active
|
||||
* transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory
|
||||
* resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active
|
||||
* transaction}.
|
||||
*
|
||||
* @param dbFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return {@literal true} if the factory has an ongoing transaction.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
public static boolean isTransactionActive(MongoDbFactory dbFactory) {
|
||||
public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
if (dbFactory.isTransactionActive()) {
|
||||
return true;
|
||||
@@ -138,7 +139,8 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
private static ClientSession doGetSession(MongoDatabaseFactory dbFactory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory);
|
||||
|
||||
@@ -169,7 +171,7 @@ public class MongoDatabaseUtils {
|
||||
return resourceHolder.getSession();
|
||||
}
|
||||
|
||||
private static ClientSession createClientSession(MongoDbFactory dbFactory) {
|
||||
private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
@@ -184,7 +186,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
private final MongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) {
|
||||
MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory);
|
||||
this.resourceHolder = resourceHolder;
|
||||
|
||||
@@ -15,14 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.MongoExceptionTranslator;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -31,92 +25,33 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Pollack
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
|
||||
*/
|
||||
public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider {
|
||||
@Deprecated
|
||||
public interface MongoDbFactory extends MongoDatabaseFactory {
|
||||
|
||||
/**
|
||||
* Creates a default {@link MongoDatabase} instance.
|
||||
*
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
|
||||
*/
|
||||
MongoDatabase getDb() throws DataAccessException;
|
||||
@Deprecated
|
||||
default MongoDatabase getDb() throws DataAccessException {
|
||||
return getMongoDatabase();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link DB} instance to access the database with the given name.
|
||||
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
* @throws DataAccessException
|
||||
*/
|
||||
MongoDatabase getDb(String dbName) throws DataAccessException;
|
||||
|
||||
/**
|
||||
* Exposes a shared {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
PersistenceExceptionTranslator getExceptionTranslator();
|
||||
|
||||
/**
|
||||
* Get the legacy database entry point. Please consider {@link #getDb()} instead.
|
||||
*
|
||||
* @return
|
||||
* @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only
|
||||
* with the legacy MongoDB driver.
|
||||
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
DB getLegacyDb();
|
||||
|
||||
/**
|
||||
* Get the underlying {@link CodecRegistry} used by the MongoDB Java driver.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Override
|
||||
default CodecRegistry getCodecRegistry() {
|
||||
return getDb().getCodecRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} for given ClientSessionOptions.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
ClientSession getSession(ClientSessionOptions options);
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to a new session with given {@link ClientSessionOptions options}.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
default MongoDbFactory withSession(ClientSessionOptions options) {
|
||||
return withSession(getSession(options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances
|
||||
* that are aware and bound to the given session.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.1
|
||||
*/
|
||||
MongoDbFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link MongoDbFactory} is bound to a {@link ClientSession} that has an
|
||||
* {@link ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.1.3
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
default MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return getMongoDatabase(dbName);
|
||||
}
|
||||
}
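Since the deprecated methods now simply delegate, migrating callers is mechanical; a small sketch follows (the "database" name matches the test configuration elsewhere in this diff, the class name is made up).

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDbFactory;

import com.mongodb.client.MongoDatabase;

public class FactoryMigrationSketch {

	@SuppressWarnings("deprecation")
	public void migrate(MongoDbFactory legacyFactory, MongoDatabaseFactory factory) {

		MongoDatabase legacyDefault = legacyFactory.getDb();               // deprecated since 3.0
		MongoDatabase currentDefault = factory.getMongoDatabase();         // replacement

		MongoDatabase legacyNamed = legacyFactory.getDb("database");       // deprecated since 3.0
		MongoDatabase currentNamed = factory.getMongoDatabase("database"); // replacement
	}
}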
|
||||
|
||||
@@ -36,15 +36,15 @@ import com.mongodb.client.ClientSession;
class MongoResourceHolder extends ResourceHolderSupport {

private @Nullable ClientSession session;
private MongoDbFactory dbFactory;
private MongoDatabaseFactory dbFactory;

/**
* Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}.
*
* @param session the associated {@link ClientSession}. Can be {@literal null}.
* @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
* @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}.
*/
MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) {
MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) {

this.session = session;
this.dbFactory = dbFactory;
@@ -75,9 +75,9 @@ class MongoResourceHolder extends ResourceHolderSupport {
}

/**
* @return the associated {@link MongoDbFactory}.
* @return the associated {@link MongoDatabaseFactory}.
*/
public MongoDbFactory getDbFactory() {
public MongoDatabaseFactory getDbFactory() {
return dbFactory;
}

@@ -36,17 +36,18 @@ import com.mongodb.client.ClientSession;

/**
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
* {@link ClientSession} based transactions for a single {@link MongoDbFactory}.
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
* <p />
* Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread.
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
* <p />
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
* <p />
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
* {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call.
* Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
* <p />
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
* {@link #doCommit(MongoTransactionObject)} to implement the
@@ -58,46 +59,46 @@ import com.mongodb.client.ClientSession;
* @currentRead Shadow's Edge - Brent Weeks
* @since 2.1
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
* @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization)
* @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
*/
public class MongoTransactionManager extends AbstractPlatformTransactionManager
implements ResourceTransactionManager, InitializingBean {

private @Nullable MongoDbFactory dbFactory;
private @Nullable MongoDatabaseFactory dbFactory;
private @Nullable TransactionOptions options;

/**
* Create a new {@link MongoTransactionManager} for bean-style usage.
* <p />
* <strong>Note:</strong>The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set}
* before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a
* {@link org.springframework.beans.factory.BeanFactory}.
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
* <p />
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
*
* @see #setDbFactory(MongoDbFactory)
* @see #setDbFactory(MongoDatabaseFactory)
* @see #setTransactionSynchronization(int)
*/
public MongoTransactionManager() {}

/**
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}.
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}.
*
* @param dbFactory must not be {@literal null}.
*/
public MongoTransactionManager(MongoDbFactory dbFactory) {
public MongoTransactionManager(MongoDatabaseFactory dbFactory) {
this(dbFactory, null);
}

/**
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the
* given {@link TransactionOptions options}, if present, when starting a new transaction.
* Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}
* applying the given {@link TransactionOptions options}, if present, when starting a new transaction.
*
* @param dbFactory must not be {@literal null}.
* @param options can be {@literal null}.
*/
public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) {
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {

Assert.notNull(dbFactory, "DbFactory must not be null!");

@@ -295,11 +296,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

/**
* Set the {@link MongoDbFactory} that this instance should manage transactions for.
* Set the {@link MongoDatabaseFactory} that this instance should manage transactions for.
*
* @param dbFactory must not be {@literal null}.
*/
public void setDbFactory(MongoDbFactory dbFactory) {
public void setDbFactory(MongoDatabaseFactory dbFactory) {

Assert.notNull(dbFactory, "DbFactory must not be null!");
this.dbFactory = dbFactory;
@@ -315,12 +316,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}

/**
* Get the {@link MongoDbFactory} that this instance manages transactions for.
* Get the {@link MongoDatabaseFactory} that this instance manages transactions for.
*
* @return can be {@literal null}.
*/
@Nullable
public MongoDbFactory getDbFactory() {
public MongoDatabaseFactory getDbFactory() {
return dbFactory;
}

@@ -329,7 +330,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
*/
@Override
public MongoDbFactory getResourceFactory() {
public MongoDatabaseFactory getResourceFactory() {
return getRequiredDbFactory();
}

@@ -344,7 +345,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager

private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) {

MongoDbFactory dbFactory = getResourceFactory();
MongoDatabaseFactory dbFactory = getResourceFactory();

MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory);
resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition));
@@ -355,7 +356,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
/**
* @throws IllegalStateException if {@link #dbFactory} is {@literal null}.
*/
private MongoDbFactory getRequiredDbFactory() {
private MongoDatabaseFactory getRequiredDbFactory() {

Assert.state(dbFactory != null,
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");

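As the class Javadoc above notes, participating code has to resolve its database through `MongoDatabaseUtils.getDatabase(...)` or `MongoTemplate`. A hedged configuration sketch (bean wiring is an assumption, not part of this diff):

// Illustrative configuration sketch; the MongoDatabaseFactory bean is assumed to exist elsewhere.
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;

@Configuration
@EnableTransactionManagement
class TransactionConfigSketch {

	@Bean
	MongoTransactionManager transactionManager(MongoDatabaseFactory databaseFactory) {
		// sessions are obtained from the same factory the MongoTemplate uses
		return new MongoTransactionManager(databaseFactory);
	}
}

// Any @Transactional service method then runs its MongoTemplate operations inside one MongoDB transaction,
// because MongoTemplate resolves its database via MongoDatabaseUtils.getDatabase(...) implicitly.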
@@ -13,7 +13,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.springframework.data.mongodb;

import reactor.core.publisher.Mono;
@@ -45,7 +44,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
MongoDatabase getMongoDatabase() throws DataAccessException;

/**
* Creates a {@link MongoDatabase} instance to access the database with the given name.
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
*
* @param dbName must not be {@literal null} or empty.
* @return
@@ -88,4 +87,16 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
* @since 2.1
*/
ReactiveMongoDatabaseFactory withSession(ClientSession session);

/**
* Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
*
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
* @since 2.2
*/
default boolean isTransactionActive() {
return false;
}
}

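A small sketch of the reactive counterpart of the session-binding contract above. The client setup and names are assumptions, and the `getMongoDatabase()` return type matches the interface as shown in this compare (a plain `MongoDatabase`):

// Illustrative sketch; session cleanup is omitted, factory wiring and names are assumptions.
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;

import com.mongodb.ClientSessionOptions;
import com.mongodb.reactivestreams.client.MongoClients;

import reactor.core.publisher.Mono;

class ReactiveSessionSketch {

	Mono<Long> countInSession() {

		ReactiveMongoDatabaseFactory factory = new SimpleReactiveMongoDatabaseFactory(MongoClients.create(), "test");

		return factory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build())
				.map(factory::withSession) // session-bound factory
				.map(bound -> bound.getMongoDatabase())
				.flatMap(db -> Mono.from(db.getCollection("person").countDocuments()));
	}
}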
@@ -0,0 +1,278 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.NoTransactionException;
|
||||
import org.springframework.transaction.reactive.ReactiveResourceSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
|
||||
* {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
|
||||
* {@link ReactiveMongoDatabaseFactory resource} and if the associated
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
|
||||
*/
|
||||
public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
if (databaseFactory.isTransactionActive()) {
|
||||
return Mono.just(true);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction() //
|
||||
.map(it -> {
|
||||
|
||||
ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
|
||||
return holder != null && holder.hasActiveTransaction();
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} from the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name from the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> doGetSession(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(dbFactory);
|
||||
|
||||
// check for native MongoDB transaction
|
||||
if (registeredHolder != null
|
||||
&& (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) {
|
||||
|
||||
return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession())
|
||||
: createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent);
|
||||
}
|
||||
|
||||
if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
// init a non native MongoDB transaction by registering a MongoSessionSynchronization
|
||||
return createClientSession(dbFactory).map(session -> {
|
||||
|
||||
ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory);
|
||||
newHolder.getRequiredSession().startTransaction();
|
||||
|
||||
synchronizationManager
|
||||
.registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory));
|
||||
newHolder.setSynchronizedWithTransaction(true);
|
||||
synchronizationManager.bindResource(dbFactory, newHolder);
|
||||
|
||||
return newHolder.getSession();
|
||||
});
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> createClientSession(ReactiveMongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
|
||||
* participating in a non-native MongoDB transaction, such as an R2DBC transaction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
private static class MongoSessionSynchronization
|
||||
extends ReactiveResourceSynchronization<ReactiveMongoResourceHolder, Object> {
|
||||
|
||||
private final ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory, synchronizationManager);
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (isTransactionActive(resourceHolder)) {
|
||||
return Mono.from(resourceHolder.getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) {
|
||||
|
||||
return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) //
|
||||
.then(super.afterCompletion(status));
|
||||
}
|
||||
|
||||
return super.afterCompletion(status);
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
if (resourceHolder.hasActiveSession()) {
|
||||
resourceHolder.getRequiredSession().close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (!resourceHolder.hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return resourceHolder.getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
}
|
||||
}
|
||||
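For code that must work both with and without an ongoing reactive transaction, the helper above is the entry point. A hedged usage sketch (method and collection names are assumptions):

// Illustrative sketch; "audit" collection and method shape are assumptions.
import org.bson.Document;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;

import reactor.core.publisher.Mono;

class DatabaseUtilsSketch {

	Mono<Void> insertAuditEntry(ReactiveMongoDatabaseFactory factory, Document entry) {

		// resolves a session-bound database when a transaction is in progress, the plain database otherwise
		return ReactiveMongoDatabaseUtils.getDatabase(factory, SessionSynchronization.ON_ACTUAL_TRANSACTION)
				.flatMap(db -> Mono.from(db.getCollection("audit").insertOne(entry)))
				.then();
	}
}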
@@ -0,0 +1,155 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.support.ResourceHolderSupport;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoTransactionManager
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private ReactiveMongoDatabaseFactory databaseFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param databaseFactory the associated {@link ReactiveMongoDatabaseFactory}. must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ClientSession}. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
ClientSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the required associated {@link ClientSession}.
|
||||
* @throws IllegalStateException if no session is associated.
|
||||
*/
|
||||
ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No ClientSession associated");
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ReactiveMongoDatabaseFactory}.
|
||||
*/
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClientSession} to guard.
|
||||
*
|
||||
* @param session can be {@literal null}.
|
||||
*/
|
||||
public void setSession(@Nullable ClientSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if session is not {@literal null}.
|
||||
*/
|
||||
boolean hasSession() {
|
||||
return session != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
* @param session
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public ClientSession setSessionIfAbsent(@Nullable ClientSession session) {
|
||||
|
||||
if (!hasSession()) {
|
||||
setSession(session);
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session is active and has not been closed.
|
||||
*/
|
||||
boolean hasActiveSession() {
|
||||
|
||||
if (!hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session has an active transaction.
|
||||
* @see #hasActiveSession()
|
||||
*/
|
||||
boolean hasActiveTransaction() {
|
||||
|
||||
if (!hasActiveSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
|
||||
* that is accessible via {@link ClientSession#getServerSession()}.
|
||||
*/
|
||||
boolean hasServerSession() {
|
||||
|
||||
try {
|
||||
return getRequiredSession().getServerSession() != null;
|
||||
} catch (IllegalStateException serverSessionClosed) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,530 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionException;
|
||||
import org.springframework.transaction.TransactionSystemException;
|
||||
import org.springframework.transaction.reactive.AbstractReactiveTransactionManager;
|
||||
import org.springframework.transaction.reactive.GenericReactiveTransaction;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.SmartTransactionObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.TransactionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
* behavior as outlined in the MongoDB reference manual.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
|
||||
*/
|
||||
public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean {
|
||||
|
||||
private @Nullable ReactiveMongoDatabaseFactory databaseFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
|
||||
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
* @see #setDatabaseFactory(ReactiveMongoDatabaseFactory)
|
||||
*/
|
||||
public ReactiveMongoTransactionManager() {}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory}.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
this(databaseFactory, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
|
||||
* {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when
|
||||
* starting a new transaction.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
@Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
|
||||
this.databaseFactory = databaseFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
|
||||
ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(getRequiredDatabaseFactory());
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
|
||||
TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
|
||||
Mono<ReactiveMongoResourceHolder> holder = newResourceHolder(definition,
|
||||
ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
|
||||
return holder.doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.setResourceHolder(resourceHolder);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(
|
||||
String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
}).doOnNext(resourceHolder -> {
|
||||
|
||||
mongoTransactionObject.startTransaction(options);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
|
||||
}
|
||||
|
||||
})//
|
||||
.onErrorMap(
|
||||
ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex))
|
||||
.doOnSuccess(resourceHolder -> {
|
||||
|
||||
synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder);
|
||||
}).then();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
mongoTransactionObject.setResourceHolder(null);
|
||||
|
||||
return synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
return Mono
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to commit transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> {
|
||||
return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook to perform an actual commit of the given transaction.<br />
|
||||
* If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
|
||||
* {@literal error labels}. <br />
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the
|
||||
* commit.
|
||||
*
|
||||
* @param synchronizationManager reactive synchronization manager.
|
||||
* @param transactionObject never {@literal null}.
|
||||
*/
|
||||
protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoTransactionObject transactionObject) {
|
||||
return transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to abort transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> {
|
||||
return Mono
|
||||
.error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status);
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
|
||||
Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction;
|
||||
|
||||
// Remove the connection holder from the thread.
|
||||
synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
mongoTransactionObject.getRequiredResourceHolder().clear();
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to release Session %s after transaction.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
mongoTransactionObject.closeSession();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for.
|
||||
*
|
||||
* @param databaseFactory must not be {@literal null}.
|
||||
*/
|
||||
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link TransactionOptions} to be applied when starting transactions.
|
||||
*
|
||||
* @param options can be {@literal null}.
|
||||
*/
|
||||
public void setOptions(@Nullable TransactionOptions options) {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDatabaseFactory();
|
||||
}
|
||||
|
||||
private Mono<ReactiveMongoResourceHolder> newResourceHolder(TransactionDefinition definition,
|
||||
ClientSessionOptions options) {
|
||||
|
||||
ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory();
|
||||
|
||||
return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory));
|
||||
}
|
||||
|
||||
/**
|
||||
* @throws IllegalStateException if {@link #databaseFactory} is {@literal null}.
|
||||
*/
|
||||
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {
|
||||
|
||||
Assert.state(databaseFactory != null,
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");
|
||||
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) transaction;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(),
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
status.getTransaction().getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) status.getTransaction();
|
||||
}
|
||||
|
||||
private static String debugString(@Nullable ClientSession session) {
|
||||
|
||||
if (session == null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
|
||||
Integer.toHexString(session.hashCode()));
|
||||
|
||||
try {
|
||||
if (session.getServerSession() != null) {
|
||||
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
debugString += String.format("error = %s", e.getMessage());
|
||||
}
|
||||
|
||||
debugString += "]";
|
||||
|
||||
return debugString;
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific transaction object, representing a {@link ReactiveMongoResourceHolder}. Used as transaction object by
|
||||
* {@link ReactiveMongoTransactionManager}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoResourceHolder
|
||||
*/
|
||||
protected static class ReactiveMongoTransactionObject implements SmartTransactionObject {
|
||||
|
||||
private @Nullable ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link MongoResourceHolder}.
|
||||
*
|
||||
* @param resourceHolder can be {@literal null}.
|
||||
*/
|
||||
void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a {@link MongoResourceHolder} is set.
|
||||
*/
|
||||
final boolean hasResourceHolder() {
|
||||
return resourceHolder != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a MongoDB transaction optionally given {@link TransactionOptions}.
|
||||
*
|
||||
* @param options can be {@literal null}
|
||||
*/
|
||||
void startTransaction(@Nullable TransactionOptions options) {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (options != null) {
|
||||
session.startTransaction(options);
|
||||
} else {
|
||||
session.startTransaction();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction.
|
||||
*/
|
||||
public Mono<Void> commitTransaction() {
|
||||
return Mono.from(getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback (abort) the transaction.
|
||||
*/
|
||||
public Mono<Void> abortTransaction() {
|
||||
return Mono.from(getRequiredSession().abortTransaction());
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a {@link ClientSession} without regard to its transactional state.
|
||||
*/
|
||||
void closeSession() {
|
||||
|
||||
ClientSession session = getRequiredSession();
|
||||
if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
|
||||
session.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public ClientSession getSession() {
|
||||
return resourceHolder != null ? resourceHolder.getSession() : null;
|
||||
}
|
||||
|
||||
private ReactiveMongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
throw new UnsupportedOperationException("flush() not supported");
|
||||
}
|
||||
}
|
||||
}
|
||||
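A configuration sketch for the reactive transaction manager above, exposed through a `TransactionalOperator` (bean wiring is an assumption, not part of this diff):

// Illustrative configuration sketch; the ReactiveMongoDatabaseFactory bean is assumed to exist elsewhere.
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoTransactionManager;
import org.springframework.transaction.reactive.TransactionalOperator;

@Configuration
class ReactiveTxConfigSketch {

	@Bean
	ReactiveMongoTransactionManager reactiveTransactionManager(ReactiveMongoDatabaseFactory factory) {
		return new ReactiveMongoTransactionManager(factory);
	}

	@Bean
	TransactionalOperator transactionalOperator(ReactiveMongoTransactionManager txManager) {
		// operator.transactional(publisher) runs the wrapped publisher inside one MongoDB transaction
		return TransactionalOperator.create(txManager);
	}
}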
@@ -0,0 +1,77 @@
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.util.Version;
import org.springframework.util.StringUtils;

import com.mongodb.MongoDriverInformation;

/**
* Class that exposes the SpringData MongoDB specific information like the current {@link Version} or
* {@link MongoDriverInformation driver information}.
*
* @author Christoph Strobl
* @since 3.0
*/
public class SpringDataMongoDB {

private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);

private static final Version FALLBACK_VERSION = new Version(3);
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
.builder(MongoDriverInformation.builder().build()).driverName("spring-data").build();

/**
* Obtain the SpringData MongoDB specific driver information.
*
* @return never {@literal null}.
*/
public static MongoDriverInformation driverInformation() {
return DRIVER_INFORMATION;
}

/**
* Fetches the "Implementation-Version" manifest attribute from the jar file.
* <p />
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
* version in all environments. In this case the current Major version is returned as a fallback.
*
* @return never {@literal null}.
*/
public static Version version() {

Package pkg = SpringDataMongoDB.class.getPackage();
String versionString = (pkg != null ? pkg.getImplementationVersion() : null);

if (!StringUtils.hasText(versionString)) {

LOGGER.debug("Unable to find Spring Data MongoDB version.");
return FALLBACK_VERSION;
}

try {
return Version.parse(versionString);
} catch (Exception e) {
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
}

return FALLBACK_VERSION;
}

}
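The `createMongoClient(...)` hook shown further below uses this class; a sketch of doing the same by hand when a client is built outside the configuration support (the settings here are a placeholder assumption):

// Illustrative sketch; MongoClientSettings content is an assumption.
import org.springframework.data.mongodb.SpringDataMongoDB;

import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class DriverInformationSketch {

	MongoClient client() {

		MongoClientSettings settings = MongoClientSettings.builder().build();

		// tags the connection metadata with the spring-data driver name, as the configuration classes do
		return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
	}
}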
@@ -17,17 +17,22 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}.
|
||||
@@ -35,40 +40,44 @@ import com.mongodb.client.MongoClient;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoConfiguration
|
||||
*/
|
||||
@Configuration
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
*
|
||||
* @return
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
public MongoClient mongoClient() {
|
||||
return createMongoClient(mongoClientSettings());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
* @see #mongoDbFactory()
|
||||
* @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) {
|
||||
return new MongoTemplate(databaseFactory, converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
* Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the
|
||||
* {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
* @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter)
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName());
|
||||
public MongoDatabaseFactory mongoDbFactory() {
|
||||
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -91,21 +100,32 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory);
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link com.mongodb.reactivestreams.client.MongoClient} instance with given
|
||||
* {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
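Because the template, database factory, and converter beans now receive their collaborators as method parameters and proxyBeanMethods is disabled, a typical application configuration only supplies the database name and, if needed, tweaks the client settings. A minimal sketch (package, class, database name, and connection string are made-up examples; getDatabaseName() is the abstract method inherited from MongoConfigurationSupport):

package com.example.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings.Builder;

@Configuration
public class ApplicationMongoConfiguration extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "orders"; // hypothetical database name
	}

	@Override
	protected void configureClientSettings(Builder builder) {
		// Connection details now go through MongoClientSettings instead of a custom mongoClient() override.
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017"));
	}
}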
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
/*
|
||||
* Copyright 2011-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}.
|
||||
* <p />
|
||||
* <strong>INFO:</strong>In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer
|
||||
* to {@link AbstractMongoClientConfiguration}.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Ryan Tenney
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see MongoConfigurationSupport
|
||||
* @see AbstractMongoClientConfiguration
|
||||
*/
|
||||
@Configuration
|
||||
public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a
|
||||
* {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public abstract MongoClient mongoClient();
|
||||
|
||||
/**
|
||||
* Creates a {@link MongoTemplate}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoTemplate mongoTemplate() throws Exception {
|
||||
return new MongoTemplate(mongoDbFactory(), mappingMongoConverter());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient}
|
||||
* instance configured in {@link #mongoClient()}.
|
||||
*
|
||||
* @see #mongoClient()
|
||||
* @see #mongoTemplate()
|
||||
* @return
|
||||
*/
|
||||
@Bean
|
||||
public MongoDbFactory mongoDbFactory() {
|
||||
return new SimpleMongoDbFactory(mongoClient(), getDatabaseName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
|
||||
* class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
|
||||
* {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is
|
||||
* overridden to implement alternate behavior.
|
||||
*
|
||||
* @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
|
||||
* entities.
|
||||
* @deprecated use {@link #getMappingBasePackages()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
protected String getMappingBasePackage() {
|
||||
|
||||
Package mappingBasePackage = getClass().getPackage();
|
||||
return mappingBasePackage == null ? null : mappingBasePackage.getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoDbFactory()
|
||||
* @return
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
|
||||
DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -18,13 +18,19 @@ package org.springframework.data.mongodb.config;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
/**
|
||||
* Base class for reactive Spring Data MongoDB configuration using JavaConfig.
|
||||
@@ -34,25 +40,33 @@ import com.mongodb.reactivestreams.client.MongoClient;
|
||||
* @since 2.0
|
||||
* @see MongoConfigurationSupport
|
||||
*/
|
||||
@Configuration
|
||||
@Configuration(proxyBeanMethods = false)
|
||||
public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
|
||||
* to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. <br />
|
||||
* Override {@link #mongoClientSettings()} to configure connection details.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @see #mongoClientSettings()
|
||||
* @see #configureClientSettings(Builder)
|
||||
*/
|
||||
public abstract MongoClient reactiveMongoClient();
|
||||
public MongoClient reactiveMongoClient() {
|
||||
return createReactiveMongoClient(mongoClientSettings());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link ReactiveMongoOperations}.
|
||||
*
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public ReactiveMongoOperations reactiveMongoTemplate() throws Exception {
|
||||
return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter());
|
||||
public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MappingMongoConverter mongoConverter) {
|
||||
return new ReactiveMongoTemplate(databaseFactory, mongoConverter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -60,7 +74,7 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
* {@link MongoClient} instance configured in {@link #reactiveMongoClient()}.
|
||||
*
|
||||
* @see #reactiveMongoClient()
|
||||
* @see #reactiveMongoTemplate()
|
||||
* @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter)
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
@@ -70,20 +84,31 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat
|
||||
|
||||
/**
|
||||
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
|
||||
* {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied.
|
||||
* {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
|
||||
*
|
||||
* @see #customConversions()
|
||||
* @see #mongoMappingContext()
|
||||
* @see #mongoMappingContext(MongoCustomConversions)
|
||||
* @see #reactiveMongoDbFactory()
|
||||
* @return never {@literal null}.
|
||||
* @throws Exception
|
||||
*/
|
||||
@Bean
|
||||
public MappingMongoConverter mappingMongoConverter() throws Exception {
|
||||
public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
MongoCustomConversions customConversions, MongoMappingContext mappingContext) {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext());
|
||||
converter.setCustomConversions(customConversions());
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(customConversions);
|
||||
converter.setCodecRegistryProvider(databaseFactory);
|
||||
|
||||
return converter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClient createReactiveMongoClient(MongoClientSettings settings) {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
}
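The reactive variant mirrors the imperative one: reactiveMongoClient() is now derived from mongoClientSettings() unless overridden. A minimal subclass might look like this (names and connection string are illustrative):

package com.example.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractReactiveMongoConfiguration;

import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings.Builder;

@Configuration
public class ReactiveMongoConfiguration extends AbstractReactiveMongoConfiguration {

	@Override
	protected String getDatabaseName() {
		return "orders"; // hypothetical database name
	}

	@Override
	protected void configureClientSettings(Builder builder) {
		// reactiveMongoClient() derives the Reactive Streams client from these settings by default.
		builder.applyConnectionString(new ConnectionString("mongodb://localhost:27017"));
	}
}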
|
||||
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ConnectionString;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link com.mongodb.ConnectionString}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String connectionString) {
|
||||
|
||||
if (!StringUtils.hasText(connectionString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ConnectionString(connectionString));
|
||||
}
|
||||
}
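Standalone, the editor behaves like any java.beans.PropertyEditor; the XML namespace registers it for com.mongodb.ConnectionString through the builder added to MongoParsingUtils further down. A small usage sketch (the URI is a made-up example):

import java.beans.PropertyEditor;

import org.springframework.data.mongodb.config.ConnectionStringPropertyEditor;

import com.mongodb.ConnectionString;

class ConnectionStringEditorSample {

	public static void main(String[] args) {

		PropertyEditor editor = new ConnectionStringPropertyEditor();

		// Empty or whitespace-only input leaves the current value untouched.
		editor.setAsText("mongodb://localhost:27017/orders");

		ConnectionString connectionString = (ConnectionString) editor.getValue();
		System.out.println(connectionString.getDatabase()); // prints "orders"
	}
}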
|
||||
@@ -51,7 +51,6 @@ import org.springframework.core.type.filter.AssignableTypeFilter;
|
||||
import org.springframework.core.type.filter.TypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
@@ -97,13 +96,14 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE);
|
||||
id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME;
|
||||
|
||||
String autoIndexCreation = element.getAttribute("auto-index-creation");
|
||||
boolean autoIndexCreationEnabled = StringUtils.hasText(autoIndexCreation) && Boolean.valueOf(autoIndexCreation);
|
||||
|
||||
parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element));
|
||||
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id);
|
||||
|
||||
createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element);
|
||||
|
||||
// Need a reference to a Mongo instance
|
||||
String dbFactoryRef = element.getAttribute("db-factory-ref");
|
||||
if (!StringUtils.hasText(dbFactoryRef)) {
|
||||
@@ -202,6 +202,11 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) {
|
||||
return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false);
|
||||
}
|
||||
|
||||
public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext,
|
||||
@Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) {
|
||||
|
||||
String ctxRef = element.getAttribute("mapping-context-ref");
|
||||
|
||||
@@ -229,6 +234,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition);
|
||||
}
|
||||
|
||||
mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation);
|
||||
|
||||
parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder);
|
||||
|
||||
ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? MAPPING_CONTEXT_BEAN_NAME
|
||||
@@ -348,20 +355,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return null;
|
||||
}
|
||||
|
||||
public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context,
|
||||
Element element) {
|
||||
|
||||
BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
|
||||
mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);
|
||||
|
||||
BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
|
||||
context.registerBeanComponent(
|
||||
builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME));
|
||||
|
||||
return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches.
|
||||
*
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.springframework.data.config.ParsingUtils.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
@@ -26,25 +27,33 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on
|
||||
* an entity.
|
||||
* {@link BeanDefinitionParser} to register an {@link AuditingEntityCallback} to transparently set auditing information
|
||||
* on an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getBeanClass(Element element) {
|
||||
return AuditingEventListener.class;
|
||||
return AuditingEntityCallback.class;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -80,7 +89,24 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
|
||||
mappingContextRef);
|
||||
parser.parse(element, parserContext);
|
||||
|
||||
builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element)));
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element));
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler,
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry,
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,17 +32,23 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -104,12 +110,27 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEventListener.class);
|
||||
.rootBeanDefinition(AuditingEntityCallback.class);
|
||||
listenerBeanDefinitionBuilder
|
||||
.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
}
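With the registrar now adding a ReactiveAuditingEntityCallback whenever Project Reactor is on the classpath, a single annotation-driven setup serves both imperative and reactive templates. A minimal sketch (entity and field names are made up; it assumes @EnableMongoAuditing lives in this config package, which matches its usual location):

package com.example.config;

import java.time.Instant;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
import org.springframework.data.mongodb.core.mapping.Document;

@Configuration
@EnableMongoAuditing
public class AuditingConfiguration {}

@Document
class Order {

	@CreatedDate Instant createdAt;       // populated by AuditingEntityCallback on first save
	@LastModifiedDate Instant modifiedAt; // refreshed on every save
}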
|
||||
|
||||
/**
|
||||
|
||||
@@ -50,10 +50,11 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
|
||||
ParsingUtils.setPropertyValue(builder, element, "port", "port");
|
||||
ParsingUtils.setPropertyValue(builder, element, "host", "host");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials");
|
||||
ParsingUtils.setPropertyValue(builder, element, "credential", "credential");
|
||||
ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet");
|
||||
ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString");
|
||||
|
||||
MongoParsingUtils.parseMongoClientOptions(element, builder);
|
||||
MongoParsingUtils.parseReplicaSet(element, builder);
|
||||
MongoParsingUtils.parseMongoClientSettings(element, builder);
|
||||
|
||||
String defaultedId = StringUtils.hasText(id) ? id : BeanNames.MONGO_BEAN_NAME;
|
||||
|
||||
@@ -62,22 +63,34 @@ public class MongoClientParser implements BeanDefinitionParser {
|
||||
BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId);
|
||||
parserContext.registerBeanComponent(mongoComponent);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils
|
||||
.getServerAddressPropertyEditorBuilder());
|
||||
BeanComponentDefinition connectionStringPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(connectionStringPropertyEditor);
|
||||
|
||||
BeanComponentDefinition serverAddressPropertyEditor = helper
|
||||
.getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(serverAddressPropertyEditor);
|
||||
|
||||
BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils
|
||||
.getWriteConcernPropertyEditorBuilder());
|
||||
BeanComponentDefinition writeConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(writeConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils
|
||||
.getReadPreferencePropertyEditorBuilder());
|
||||
BeanComponentDefinition readConcernEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readConcernEditor);
|
||||
|
||||
BeanComponentDefinition readPreferenceEditor = helper
|
||||
.getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder());
|
||||
parserContext.registerBeanComponent(readPreferenceEditor);
|
||||
|
||||
BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils
|
||||
.getMongoCredentialPropertyEditor());
|
||||
BeanComponentDefinition credentialsEditor = helper
|
||||
.getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor());
|
||||
parserContext.registerBeanComponent(credentialsEditor);
|
||||
|
||||
BeanComponentDefinition uuidRepresentationEditor = helper
|
||||
.getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder());
|
||||
parserContext.registerBeanComponent(uuidRepresentationEditor);
|
||||
|
||||
parserContext.popAndRegisterContainingComponent();
|
||||
|
||||
return mongoComponent.getBeanDefinition();
|
||||
|
||||
@@ -15,12 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
@@ -28,20 +28,19 @@ import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.context.PersistentEntities;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.support.CachingIsNewStrategyFactory;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
|
||||
/**
|
||||
* Base class for Spring Data MongoDB to be extended for JavaConfiguration usage.
|
||||
*
|
||||
@@ -81,39 +80,44 @@ public abstract class MongoConfigurationSupport {
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public MongoMappingContext mongoMappingContext() throws ClassNotFoundException {
|
||||
public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
|
||||
throws ClassNotFoundException {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
|
||||
*
|
||||
* @return
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {
|
||||
|
||||
return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(
|
||||
new PersistentEntities(Arrays.<MappingContext<?, ?>> asList(new MappingContext[] { mongoMappingContext() }))));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
* {@link #mongoMappingContext()}. Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* {@link CustomConversions} will be registered with the
|
||||
* {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link #mongoMappingContext()}.
|
||||
* Returns an empty {@link MongoCustomConversions} instance by default.
|
||||
* <p>
|
||||
* <strong>NOTE:</strong> Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB
|
||||
* native simple types and register custom {@link Converter converters}.
|
||||
*
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
@Bean
|
||||
public CustomConversions customConversions() {
|
||||
return new MongoCustomConversions(Collections.emptyList());
|
||||
public MongoCustomConversions customConversions() {
|
||||
return MongoCustomConversions.create(this::configureConverters);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration hook for {@link MongoCustomConversions} creation.
|
||||
*
|
||||
* @param converterConfigurationAdapter never {@literal null}.
|
||||
* @since 2.3
|
||||
* @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs()
|
||||
* @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs()
|
||||
*/
|
||||
protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -190,4 +194,40 @@ public abstract class MongoConfigurationSupport {
|
||||
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
|
||||
: PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal false} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; in 2.x it was {@literal true}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}. <br />
|
||||
* Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings mongoClientSettings() {
|
||||
|
||||
MongoClientSettings.Builder builder = MongoClientSettings.builder();
|
||||
configureClientSettings(builder);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure {@link MongoClientSettings} via its {@link Builder} API.
|
||||
*
|
||||
* @param builder never {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
protected void configureClientSettings(MongoClientSettings.Builder builder) {
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
}
|
||||
}
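Put together, the support class now favours overriding the protected hooks over redefining the @Bean methods. A sketch of a configuration that switches to the native driver java.time codecs and re-enables automatic index creation (package, class, and database name are illustrative):

package com.example.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;

@Configuration
public class ConversionConfiguration extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "catalog"; // hypothetical database name
	}

	@Override
	protected void configureConverters(MongoConverterConfigurationAdapter adapter) {
		// Let the driver handle java.time types natively instead of the Spring Data converters.
		adapter.useNativeDriverJavaTimeCodecs();
	}

	@Override
	protected boolean autoIndexCreation() {
		// Opt back in to automatic index creation, which defaults to false as of 3.x.
		return true;
	}
}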
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@@ -26,6 +27,7 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
@@ -78,12 +80,23 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
|
||||
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
|
||||
} else if ("MONGODB-CR".equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
|
||||
Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class,
|
||||
"createMongoCRCredential", String.class, String.class, char[].class);
|
||||
|
||||
if (createCRCredentialMethod == null) {
|
||||
throw new IllegalArgumentException("MONGODB-CR is no longer supported.");
|
||||
}
|
||||
|
||||
MongoCredential credential = MongoCredential.class
|
||||
.cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
credentials.add(credential);
|
||||
|
||||
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
|
||||
@@ -32,14 +32,12 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.MongoClientFactoryBean;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
|
||||
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import com.mongodb.Mongo;
|
||||
import com.mongodb.MongoClientURI;
|
||||
import com.mongodb.MongoURI;
|
||||
import com.mongodb.ConnectionString;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s.
|
||||
@@ -84,10 +82,11 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
|
||||
|
||||
// Common setup
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class);
|
||||
BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(SimpleMongoClientDatabaseFactory.class);
|
||||
setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern");
|
||||
|
||||
BeanDefinition mongoUri = getMongoUri(element, parserContext);
|
||||
BeanDefinition mongoUri = getConnectionString(element, parserContext);
|
||||
|
||||
if (mongoUri != null) {
|
||||
|
||||
@@ -97,7 +96,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext);
|
||||
|
||||
String mongoRef = element.getAttribute("mongo-ref");
|
||||
String mongoRef = element.getAttribute("mongo-client-ref");
|
||||
|
||||
String dbname = element.getAttribute("dbname");
|
||||
|
||||
// Defaulting
|
||||
@@ -119,8 +119,8 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the
|
||||
* {@link Mongo} instance was registered under.
|
||||
* Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the
|
||||
* name under which the {@link com.mongodb.client.MongoClient} instance was registered.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param parserContext must not be {@literal null}.
|
||||
@@ -136,8 +136,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured
|
||||
* attributes. <br />
|
||||
* Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes. <br />
|
||||
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except
|
||||
* {@literal write-concern} and/or {@literal id}.
|
||||
*
|
||||
@@ -146,11 +145,19 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
* @return {@literal null} in case no client-/uri defined.
|
||||
*/
|
||||
@Nullable
|
||||
private BeanDefinition getMongoUri(Element element, ParserContext parserContext) {
|
||||
private BeanDefinition getConnectionString(Element element, ParserContext parserContext) {
|
||||
|
||||
boolean hasClientUri = element.hasAttribute("client-uri");
|
||||
String type = null;
|
||||
|
||||
if (!hasClientUri && !element.hasAttribute("uri")) {
|
||||
if (element.hasAttribute("client-uri")) {
|
||||
type = "client-uri";
|
||||
} else if (element.hasAttribute("connection-string")) {
|
||||
type = "connection-string";
|
||||
} else if (element.hasAttribute("uri")) {
|
||||
type = "uri";
|
||||
}
|
||||
|
||||
if (!StringUtils.hasText(type)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -164,16 +171,12 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
|
||||
|
||||
if (element.getAttributes().getLength() > allowedAttributesCount) {
|
||||
|
||||
parserContext.getReaderContext().error(
|
||||
"Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!",
|
||||
parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
|
||||
Class<?> type = MongoClientURI.class;
|
||||
String uri = hasClientUri ? element.getAttribute("client-uri") : element.getAttribute("uri");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type);
|
||||
builder.addConstructorArgValue(uri);
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class);
|
||||
builder.addConstructorArgValue(element.getAttribute(type));
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -35,6 +35,7 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class MongoParsingUtils {
|
||||
@@ -42,59 +43,78 @@ abstract class MongoParsingUtils {
|
||||
private MongoParsingUtils() {}
|
||||
|
||||
/**
|
||||
* Parses the mongo replica-set element.
|
||||
*
|
||||
* @param parserContext the parser context
|
||||
* @param element the mongo element
|
||||
* @param mongoBuilder the bean definition builder to populate
|
||||
* @return
|
||||
*/
|
||||
static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) {
|
||||
setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds");
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper
|
||||
* Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper
|
||||
* attributes.
|
||||
*
|
||||
* @param element must not be {@literal null}.
|
||||
* @param mongoClientBuilder must not be {@literal null}.
|
||||
*
|
||||
* @param element
|
||||
* @param mongoClientBuilder
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @since 3.0
|
||||
*/
|
||||
public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) {
|
||||
|
||||
Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options");
|
||||
|
||||
if (optionsElement == null) {
|
||||
Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings");
|
||||
if (settingsElement == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoClientOptionsFactoryBean.class);
|
||||
.genericBeanDefinition(MongoClientSettingsFactoryBean.class);
|
||||
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier",
|
||||
"threadsAllowedToBlockForConnectionMultiplier");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation");
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
// SocketSettings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize");
|
||||
|
||||
// Server Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency",
|
||||
"serverHeartbeatFrequencyMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency",
|
||||
"serverMinHeartbeatFrequencyMS");
|
||||
|
||||
// Cluster Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout",
|
||||
"clusterServerSelectionTimeoutMS");
|
||||
|
||||
// Connection Pool Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time",
|
||||
"poolMaxConnectionLifeTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time",
|
||||
"poolMaxConnectionIdleTimeMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay",
|
||||
"poolMaintenanceInitialDelayMS");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency",
|
||||
"poolMaintenanceFrequencyMS");
|
||||
|
||||
// SSL Settings
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed",
|
||||
"sslInvalidHostNameAllowed");
|
||||
setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider");
|
||||
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -116,6 +136,24 @@ abstract class MongoParsingUtils {
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ReadConcernPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* One should only register one bean definition but want to have the convenience of using
|
||||
* AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the
|
||||
@@ -123,7 +161,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() {
|
||||
|
||||
Map<String, String> customEditors = new ManagedMap<String, String>();
|
||||
Map<String, String> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ServerAddress[]",
|
||||
"org.springframework.data.mongodb.config.ServerAddressPropertyEditor");
|
||||
|
||||
@@ -141,7 +179,7 @@ abstract class MongoParsingUtils {
|
||||
*/
|
||||
static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<String, Class<?>>();
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
@@ -167,4 +205,41 @@ abstract class MongoParsingUtils {
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link ConnectionStringPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a
|
||||
* {@link UUidRepresentationPropertyEditor}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.0
|
||||
*/
|
||||
static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() {
|
||||
|
||||
Map<String, Class<?>> customEditors = new ManagedMap<>();
|
||||
customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class);
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class);
|
||||
builder.addPropertyValue("customEditors", customEditors);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadConcernLevel;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link ReadConcern} if it is a well-known level as identified by
|
||||
* {@link ReadConcernLevel#fromString(String)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class ReadConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String readConcernString) {
|
||||
|
||||
if (!StringUtils.hasText(readConcernString)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString)));
|
||||
}
|
||||
}
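Usage sketch (fragment, imports as in the class above): a well-known level name is resolved via ReadConcernLevel.fromString, while empty or null input leaves the value untouched.

ReadConcernPropertyEditor editor = new ReadConcernPropertyEditor();
editor.setAsText("majority");

// same as new ReadConcern(ReadConcernLevel.MAJORITY)
ReadConcern readConcern = (ReadConcern) editor.getValue();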
|
||||
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Parse a {@link String} to a {@link UuidRepresentation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.0
|
||||
*/
|
||||
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String value) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setValue(UuidRepresentation.valueOf(value));
|
||||
}
|
||||
}
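Usage sketch (fragment): the text must match one of the org.bson.UuidRepresentation constant names.

UUidRepresentationPropertyEditor editor = new UUidRepresentationPropertyEditor();
editor.setAsText("STANDARD");

UuidRepresentation representation = (UuidRepresentation) editor.getValue(); // UuidRepresentation.STANDARD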
|
||||
@@ -34,7 +34,7 @@ import com.mongodb.WriteConcern;
|
||||
public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
/**
|
||||
* Parse a string to a List<ServerAddress>
|
||||
* Parse a string to a {@link WriteConcern}.
|
||||
*/
|
||||
@Override
|
||||
public void setAsText(@Nullable String writeConcernString) {
|
||||
@@ -51,6 +51,5 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport {
|
||||
// pass on the string to the constructor
|
||||
setValue(new WriteConcern(writeConcernString));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,6 +31,7 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
@@ -135,6 +136,29 @@ public interface BulkOperations {
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default BulkOperations replaceOne(Query query, Object replacement) {
|
||||
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndReplaceOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
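A hedged usage sketch of the new replace support; the template variable, the Person type and its constructor are assumptions for illustration and not part of this change.

BulkOperations bulkOps = template.bulkOps(BulkOperations.BulkMode.ORDERED, Person.class);

bulkOps.replaceOne(Query.query(Criteria.where("firstName").is("Luke")), new Person("Luke", "Skywalker"));
bulkOps.replaceOne(Query.query(Criteria.where("firstName").is("Han")), new Person("Han", "Solo"),
		FindAndReplaceOptions.options().upsert());

// replacements participate in mapping events/callbacks just like single-document saves
BulkWriteResult result = bulkOps.execute();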
|
||||
|
||||
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
|
||||
@@ -20,6 +20,7 @@ import lombok.EqualsAndHashCode;
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -84,8 +85,19 @@ public class ChangeStreamEvent<T> {
|
||||
@Nullable
|
||||
public Instant getTimestamp() {
|
||||
|
||||
return raw != null && raw.getClusterTime() != null
|
||||
? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) : null;
|
||||
return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class)
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time}.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public BsonTimestamp getBsonTimestamp() {
|
||||
return raw != null ? raw.getClusterTime() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,12 +21,17 @@ import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
@@ -47,7 +52,8 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
@@ -83,7 +89,31 @@ public class ChangeStreamOptions {
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Instant> getResumeTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp);
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<BsonTimestamp> getResumeBsonTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isStartAfter() {
|
||||
return Resume.START_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isResumeAfter() {
|
||||
return Resume.RESUME_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -103,6 +133,48 @@ public class ChangeStreamOptions {
|
||||
return new ChangeStreamOptionsBuilder();
|
||||
}
|
||||
|
||||
private static <T> T asTimestampOfType(Object timestamp, Class<T> targetType) {
|
||||
return targetType.cast(doGetTimestamp(timestamp, targetType));
|
||||
}
|
||||
|
||||
private static <T> Object doGetTimestamp(Object timestamp, Class<T> targetType) {
|
||||
|
||||
if (ClassUtils.isAssignableValue(targetType, timestamp)) {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Resume {
|
||||
|
||||
UNDEFINED,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument)
|
||||
*/
|
||||
START_AFTER,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument)
|
||||
*/
|
||||
RESUME_AFTER
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
@@ -115,7 +187,8 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
@@ -183,6 +256,11 @@ public class ChangeStreamOptions {
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
if (this.resume == Resume.UNDEFINED) {
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -224,6 +302,51 @@ public class ChangeStreamOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to continue emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to start emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.START_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
@@ -231,11 +354,12 @@ public class ChangeStreamOptions {
|
||||
|
||||
ChangeStreamOptions options = new ChangeStreamOptions();
|
||||
|
||||
options.filter = filter;
|
||||
options.resumeToken = resumeToken;
|
||||
options.fullDocumentLookup = fullDocumentLookup;
|
||||
options.collation = collation;
|
||||
options.resumeTimestamp = resumeTimestamp;
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
|
||||
return options;
|
||||
}
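A usage sketch of the extended builder; the resumeToken variable is a placeholder for a BsonValue taken from a previously received change stream event.

ChangeStreamOptions timestampResume = ChangeStreamOptions.builder()
		.resumeAt(new BsonTimestamp(1578650400, 0)) // cluster time to resume from
		.build();

timestampResume.getResumeTimestamp();     // Optional<Instant>, converted from the BsonTimestamp
timestampResume.getResumeBsonTimestamp(); // Optional<BsonTimestamp>, as given

ChangeStreamOptions tokenResume = ChangeStreamOptions.builder()
		.startAfter(resumeToken) // placeholder BsonValue; isStartAfter() will report true
		.build();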
|
||||
|
||||
@@ -0,0 +1,227 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite
|
||||
* to {@code $geoWithin}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0
|
||||
*/
|
||||
class CountQuery {
|
||||
|
||||
private Document source;
|
||||
|
||||
private CountQuery(Document source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public static CountQuery of(Document source) {
|
||||
return new CountQuery(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query
|
||||
* to be usable with {@code countDocuments()}.
|
||||
*
|
||||
* @return the query {@link Document} that can be used with {@code countDocuments()}.
|
||||
*/
|
||||
public Document toQueryDocument() {
|
||||
|
||||
if (!requiresRewrite(source)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
Document target = new Document();
|
||||
|
||||
for (Map.Entry<String, Object> entry : source.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Document theValue = (Document) entry.getValue();
|
||||
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Collection<?> source = (Collection<?>) entry.getValue();
|
||||
|
||||
target.put(entry.getKey(), rewriteCollection(source));
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("$and".equals(entry.getKey()) && target.containsKey("$and")) {
|
||||
// Expect $and to be processed with Document and createGeoWithin.
|
||||
continue;
|
||||
}
|
||||
|
||||
target.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param valueToInspect
|
||||
* @return {@code true} if the enclosing element needs to be rewritten.
|
||||
*/
|
||||
private boolean requiresRewrite(Object valueToInspect) {
|
||||
|
||||
if (valueToInspect instanceof Document) {
|
||||
return requiresRewrite((Document) valueToInspect);
|
||||
}
|
||||
|
||||
if (valueToInspect instanceof Collection) {
|
||||
return requiresRewrite((Collection) valueToInspect);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Collection<?> collection) {
|
||||
|
||||
for (Object o : collection) {
|
||||
if (o instanceof Document && requiresRewrite((Document) o)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean requiresRewrite(Document document) {
|
||||
|
||||
if (containsNear(document)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (Object entry : document.values()) {
|
||||
|
||||
if (requiresRewrite(entry)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private Collection<Object> rewriteCollection(Collection<?> source) {
|
||||
|
||||
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
|
||||
|
||||
for (Object item : source) {
|
||||
if (item instanceof Document && requiresRewrite(item)) {
|
||||
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
|
||||
} else {
|
||||
rewrittenCollection.add(item);
|
||||
}
|
||||
}
|
||||
|
||||
return rewrittenCollection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rewrite the near query for field {@code key} to {@code $geoWithin}.
|
||||
*
|
||||
* @param key the queried field.
|
||||
* @param source source {@link Document}.
|
||||
* @param $and potentially existing {@code $and} condition.
|
||||
* @return the rewritten query {@link Document}.
|
||||
*/
|
||||
private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {
|
||||
|
||||
boolean spheric = source.containsKey("$nearSphere");
|
||||
Object $near = spheric ? source.get("$nearSphere") : source.get("$near");
|
||||
|
||||
Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
|
||||
List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
|
||||
Document $geoWithinMax = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMax));
|
||||
|
||||
if (!containsNearWithMinDistance(source)) {
|
||||
return new Document(key, $geoWithinMax);
|
||||
}
|
||||
|
||||
Number minDistance = (Number) source.get("$minDistance");
|
||||
List<Object> $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance);
|
||||
Document $geoWithinMin = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMin));
|
||||
|
||||
List<Document> criteria = new ArrayList<>();
|
||||
|
||||
if ($and != null) {
|
||||
if ($and instanceof Collection) {
|
||||
criteria.addAll((Collection) $and);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
|
||||
+ $and);
|
||||
}
|
||||
}
|
||||
|
||||
criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
|
||||
criteria.add(new Document(key, $geoWithinMax));
|
||||
return new Document("$and", criteria);
|
||||
}
|
||||
|
||||
private static boolean containsNear(Document source) {
|
||||
return source.containsKey("$near") || source.containsKey("$nearSphere");
|
||||
}
|
||||
|
||||
private static boolean containsNearWithMinDistance(Document source) {
|
||||
|
||||
if (!containsNear(source)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return source.containsKey("$minDistance");
|
||||
}
|
||||
|
||||
private static Object toCenterCoordinates(Object value) {
|
||||
|
||||
if (ObjectUtils.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Point) {
|
||||
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
|
||||
}
|
||||
|
||||
if (value instanceof Document && ((Document) value).containsKey("x")) {
|
||||
|
||||
Document point = (Document) value;
|
||||
return Arrays.asList(point.get("x"), point.get("y"));
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
}
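For illustration, a $near criterion combined with $maxDistance is rewritten roughly as follows (sketch derived from the code above, not output copied from a test):

Document source = new Document("location",
		new Document("$near", Arrays.asList(-73.99171d, 40.738868d)).append("$maxDistance", 10));

Document countQuery = CountQuery.of(source).toQueryDocument();
// { "location" : { "$geoWithin" : { "$center" : [ [ -73.99171, 40.738868 ], 10 ] } } }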
|
||||
@@ -15,9 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.bson.Document;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.FindIterable;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Simple callback interface to allow customization of a {@link FindIterable}.
|
||||
@@ -25,7 +31,14 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface CursorPreparer {
|
||||
public interface CursorPreparer extends ReadPreferenceAware {
|
||||
|
||||
/**
|
||||
* Default {@link CursorPreparer} just passing on the given {@link FindIterable}.
|
||||
*
|
||||
* @since 2.2
|
||||
*/
|
||||
CursorPreparer NO_OP_PREPARER = (iterable -> iterable);
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
@@ -33,4 +46,37 @@ public interface CursorPreparer {
|
||||
* @param cursor
|
||||
*/
|
||||
FindIterable<Document> prepare(FindIterable<Document> cursor);
|
||||
|
||||
/**
|
||||
* Apply query-specific settings to the {@link MongoCollection} and initiate a find operation returning a
|
||||
* {@link FindIterable} via the given {@link Function find} function.
|
||||
*
|
||||
* @param collection must not be {@literal null}.
|
||||
* @param find must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if one of the required arguments is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
|
||||
Function<MongoCollection<Document>, FindIterable<Document>> find) {
|
||||
|
||||
Assert.notNull(collection, "Collection must not be null!");
|
||||
Assert.notNull(find, "Find function must not be null!");
|
||||
|
||||
if (hasReadPreference()) {
|
||||
collection = collection.withReadPreference(getReadPreference());
|
||||
}
|
||||
|
||||
return prepare(find.apply(collection));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link ReadPreference} to apply or {@literal null} if none defined.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
default ReadPreference getReadPreference() {
|
||||
return null;
|
||||
}
|
||||
}
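A sketch of a preparer carrying a ReadPreference; initiateFind applies it to the collection before invoking the find function. The collection variable is assumed to be a MongoCollection of Document.

CursorPreparer preparer = new CursorPreparer() {

	@Override
	public FindIterable<Document> prepare(FindIterable<Document> iterable) {
		return iterable.limit(10); // apply cursor settings such as limit/skip
	}

	@Override
	public ReadPreference getReadPreference() {
		return ReadPreference.secondaryPreferred();
	}
};

FindIterable<Document> iterable = preparer.initiateFind(collection, col -> col.find(new Document("active", true)));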
|
||||
|
||||
@@ -26,30 +26,32 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOneModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
import com.mongodb.client.model.*;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
@@ -58,7 +60,10 @@ import com.mongodb.client.model.WriteModel;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @author Roman Puchkovskiy
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -66,7 +71,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<WriteModel<Document>> models = new ArrayList<>();
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
@@ -123,16 +128,9 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
if (document instanceof Document) {
|
||||
|
||||
models.add(new InsertOneModel<>((Document) document));
|
||||
return this;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
mongoOperations.getConverter().write(document, sink);
|
||||
|
||||
models.add(new InsertOneModel<>(sink));
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
addModel(source, new InsertOneModel<>(getMappedObject(source)));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -246,7 +244,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -267,6 +265,29 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(replacement);
|
||||
addModel(source,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
@@ -276,18 +297,49 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
try {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
|
||||
});
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private BulkWriteResult bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
@@ -302,14 +354,12 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||
|
||||
if (multi) {
|
||||
models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
} else {
|
||||
models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -358,6 +408,100 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private void addModel(Object source, WriteModel<Document> model) {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (bulkOperationContext.getEventPublisher() == null) {
|
||||
return event;
|
||||
}
|
||||
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
return event;
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -372,6 +516,29 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||
* @param update The {@link Update} to apply
|
||||
* @param upsert flag to indicate if document should be upserted.
|
||||
* @return new instance of {@link UpdateOptions}.
|
||||
*/
|
||||
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||
list.add(arrayFilter.asDocument());
|
||||
}
|
||||
options.arrayFilters(list);
|
||||
}
|
||||
|
||||
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
return options;
|
||||
}
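The practical effect: an Update that declares array filters now has them propagated into the driver's UpdateOptions, for example (field names assumed for illustration, bulkOps as above):

Update update = new Update().set("grades.$[grade]", 100)
		.filterArray(Criteria.where("grade").gte(90));

bulkOps.updateMulti(Query.query(Criteria.where("semester").is(1)), update);
// the resulting UpdateManyModel carries UpdateOptions with arrayFilters [ { "grade" : { "$gte" : 90 } } ]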
|
||||
|
||||
/**
|
||||
* {@link BulkOperationContext} holds information about
|
||||
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
|
||||
@@ -387,5 +554,20 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
@@ -64,7 +64,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) {
|
||||
this(mongoDbFactory, collectionName, queryMapper, null);
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
* {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
@Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
@@ -120,19 +120,15 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return execute(collection -> {
|
||||
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
MongoPersistentEntity<?> entity = lookupPersistentEntity(type, collectionName);
|
||||
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
|
||||
lookupPersistentEntity(type, collectionName)));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
Document mappedKeys = mapper.getMappedSort(indexDefinition.getIndexKeys(), entity);
|
||||
return collection.createIndex(mappedKeys, indexOptions);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -192,7 +188,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
|
||||
|
||||
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
|
||||
List<IndexInfo> indexInfoList = new ArrayList<>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
|
||||
@@ -217,4 +213,25 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, callback);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
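Taken together, a sketch of what the reworked ensureIndex now does for a mapped type (template and Person are assumed):

IndexOperations indexOps = template.indexOps(Person.class);

indexOps.ensureIndex(new Index("age", Sort.Direction.ASC)
		.partial(PartialIndexFilter.of(Criteria.where("age").gte(18))));
// index keys and the partial filter expression are mapped via the QueryMapper; a collation declared
// on the entity is added as the index default unless the index defines its own.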
|
||||
|
||||
@@ -15,13 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
|
||||
/**
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}.
|
||||
* {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -29,14 +29,14 @@ import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
*/
|
||||
class DefaultIndexOperationsProvider implements IndexOperationsProvider {
|
||||
|
||||
private final MongoDbFactory mongoDbFactory;
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final QueryMapper mapper;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param mapper must not be {@literal null}.
|
||||
*/
|
||||
DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) {
|
||||
DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) {
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.mapper = mapper;
|
||||
|
||||
@@ -94,23 +94,16 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
ops = ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
|
||||
}).next();
|
||||
}
|
||||
@@ -126,21 +119,24 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
@@ -148,4 +144,25 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
.map(IndexConverters.documentToIndexInfoConverter()::convert);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,13 +42,15 @@ import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
class DefaultScriptOperations implements ScriptOperations {
|
||||
|
||||
private static final String SCRIPT_COLLECTION_NAME = "system.js";
|
||||
|
||||
@@ -21,12 +21,14 @@ import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
@@ -34,15 +36,15 @@ import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of its mapping metadata.
|
||||
*
|
||||
@@ -151,15 +153,59 @@ class EntityOperations {
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return the name of the distanceField to use. {@literal dis} by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
public String nearQueryDistanceFieldName(Class<?> domainType) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(domainType);
|
||||
if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) {
|
||||
return "dis";
|
||||
}
|
||||
|
||||
String distanceFieldName = "calculated-distance";
|
||||
int counter = 0;
|
||||
while (persistentEntity.getPersistentProperty(distanceFieldName) != null) {
|
||||
distanceFieldName += "-" + (counter++);
|
||||
}
|
||||
|
||||
return distanceFieldName;
|
||||
}
|
||||
|
||||
private static Document parse(String source) {
|
||||
|
||||
try {
|
||||
return Document.parse(source);
|
||||
} catch (JSONParseException | org.bson.json.JsonParseException o_O) {
|
||||
} catch (org.bson.json.JsonParseException o_O) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
} catch (RuntimeException o_O) {
|
||||
|
||||
// legacy 3.x exception
|
||||
if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
}
|
||||
throw o_O;
|
||||
}
|
||||
}
|
||||
|
||||
public <T> TypedOperations<T> forType(@Nullable Class<T> entityClass) {
|
||||
|
||||
if (entityClass != null) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(entityClass);
|
||||
|
||||
if (entity != null) {
|
||||
return new TypedEntityOperations(entity);
|
||||
}
|
||||
|
||||
}
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -189,6 +235,16 @@ class EntityOperations {
|
||||
*/
|
||||
Query getByIdQuery();
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}.
|
||||
*
|
||||
* @return the {@link Query} to use for removing the entity. Never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default Query getRemoveByQuery() {
|
||||
return isVersionedEntity() ? getQueryForVersion() : getByIdQuery();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to find the entity in its current version.
|
||||
*
|
||||
@@ -219,9 +275,11 @@ class EntityOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the version if the entity has a version property, {@literal null} otherwise.
|
||||
* Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}.
|
||||
*
|
||||
* @return
|
||||
* @return the entity version. Can be {@literal null}.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check
|
||||
* {@link #isVersionedEntity()}.
|
||||
*/
|
||||
@Nullable
|
||||
Object getVersion();
|
||||
@@ -235,7 +293,7 @@ class EntityOperations {
|
||||
|
||||
/**
|
||||
* Returns whether the entity is considered to be new.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
* @since 2.1.2
|
||||
*/
|
||||
@@ -277,8 +335,8 @@ class EntityOperations {
|
||||
/**
|
||||
* Returns the current version value if the entity has a version property.
|
||||
*
|
||||
* @return the current version or {@literal null} in case it's uninitialized or the entity doesn't expose a version
|
||||
* property.
|
||||
* @return the current version or {@literal null} in case it's uninitialized.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property.
|
||||
*/
|
||||
@Nullable
|
||||
Number getVersion();
|
||||
@@ -386,7 +444,7 @@ class EntityOperations {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -480,10 +538,10 @@ class EntityOperations {
|
||||
public Query getQueryForVersion() {
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
|
||||
MongoPersistentProperty property = entity.getRequiredVersionProperty();
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
|
||||
return new Query(Criteria.where(idProperty.getName()).is(getId())//
|
||||
.and(property.getName()).is(getVersion()));
|
||||
.and(versionProperty.getName()).is(getVersion()));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -557,7 +615,7 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -667,4 +725,102 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-specific operations abstraction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @param <T>
|
||||
* @since 2.2
|
||||
*/
|
||||
interface TypedOperations<T> {
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} for the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation();
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for
|
||||
* the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} backed by {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query.getCollation().isPresent()) {
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
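The collation lookup above is a two-step fallback: a collation set on the Query wins, otherwise the collation configured on the entity (if any) is used. A hedged restatement of TypedEntityOperations#getCollation(Query) in isolation:

// Query-level collation takes precedence; the entity-level collation is the fallback.
Optional<Collation> collation = query.getCollation().isPresent()
        ? query.getCollation()
        : Optional.ofNullable(entity.getCollation());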
@@ -21,6 +21,7 @@ import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -170,6 +171,18 @@ public interface ExecutableFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
|
||||
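The default matching(CriteriaDefinition) method added above is a convenience wrapper around matching(Query). A hedged usage sketch, where template stands for a MongoTemplate (which implements ExecutableFindOperation) and the Person type and field name are illustrative only:

// Both calls are equivalent; the default method simply wraps the criteria in a Query.
template.query(Person.class).matching(Query.query(Criteria.where("firstname").is("luke"))).all();
template.query(Person.class).matching(Criteria.where("firstname").is("luke")).all();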
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
@@ -267,6 +268,11 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
this.limit = Optional.of(limit);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReadPreference getReadPreference() {
|
||||
return delegate.getReadPreference();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -19,6 +19,7 @@ import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -44,6 +45,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public interface ExecutableMapReduceOperation {
|
||||
@@ -146,6 +148,18 @@ public interface ExecutableMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
@@ -119,6 +120,18 @@ public interface ExecutableRemoveOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingRemove<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingRemove}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingRemove<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.result.UpdateResult;
|
||||
@@ -151,13 +154,16 @@ public interface ExecutableUpdateOperation {
|
||||
interface UpdateWithUpdate<T> {
|
||||
|
||||
/**
|
||||
* Set the {@link Update} to be applied.
|
||||
* Set the {@link UpdateDefinition} to be applied.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingUpdate}.
|
||||
* @throws IllegalArgumentException if update is {@literal null}.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
TerminatingUpdate<T> apply(Update update);
|
||||
TerminatingUpdate<T> apply(UpdateDefinition update);
|
||||
|
||||
/**
|
||||
* Specify {@code replacement} object.
|
||||
@@ -205,6 +211,18 @@ public interface ExecutableUpdateOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
UpdateWithUpdate<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link UpdateWithUpdate}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default UpdateWithUpdate<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
||||
}
|
||||
|
||||
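Widening apply(Update) to apply(UpdateDefinition) lets the same fluent method accept both classic Update objects and aggregation pipeline updates. A hedged sketch, where template again stands for a MongoTemplate and the domain type and field names are illustrative:

// A classic Update still works through apply(UpdateDefinition).
template.update(Person.class)
        .matching(Criteria.where("firstname").is("luke"))
        .apply(new Update().set("lastname", "skywalker"))
        .first();

// An aggregation pipeline update is accepted through the very same method.
template.update(Person.class)
        .matching(Criteria.where("firstname").is("luke"))
        .apply(AggregationUpdate.update().set("lastname").toValue("skywalker"))
        .first();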
/**
|
||||
|
||||
@@ -21,7 +21,7 @@ import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -67,7 +67,7 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
Query query;
|
||||
@Nullable Update update;
|
||||
@Nullable UpdateDefinition update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@@ -76,10 +76,10 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingUpdate<T> apply(Update update) {
|
||||
public TerminatingUpdate<T> apply(UpdateDefinition update) {
|
||||
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
|
||||
|
||||
@@ -33,6 +33,31 @@ public class FindAndModifyOptions {
|
||||
|
||||
private @Nullable Collation collation;
|
||||
|
||||
private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions returnNew(boolean returnNew) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions upsert(boolean upsert) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions remove(boolean remove) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions collation(@Nullable Collation collation) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
@@ -42,9 +67,19 @@ public class FindAndModifyOptions {
|
||||
return new FindAndModifyOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndModifyOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndModifyOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link FindAndModifyOptions} based on the options of the given {@literal source}.
|
||||
*
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return new instance of {@link FindAndModifyOptions}.
|
||||
* @since 2.0
|
||||
|
||||
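FindAndModifyOptions.none() shown above returns a shared, unmodifiable instance whose mutators throw UnsupportedOperationException; callers that need custom options should start from options() instead. A short hedged sketch:

FindAndModifyOptions defaults = FindAndModifyOptions.none();   // shared immutable instance
// defaults.returnNew(true);                                   // would throw UnsupportedOperationException
FindAndModifyOptions custom = FindAndModifyOptions.options()   // fresh, mutable instance
        .returnNew(true)
        .upsert(true);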
@@ -36,6 +36,21 @@ public class FindAndReplaceOptions {
|
||||
private boolean returnNew;
|
||||
private boolean upsert;
|
||||
|
||||
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions upsert() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance.
|
||||
* <dl>
|
||||
@@ -51,6 +66,16 @@ public class FindAndReplaceOptions {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndReplaceOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance with
|
||||
* <dl>
|
||||
|
||||
@@ -15,20 +15,69 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.reactivestreams.client.FindPublisher;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Simple callback interface to allow customization of a {@link FindPublisher}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Konstantin Volivach
|
||||
*/
|
||||
public interface FindPublisherPreparer {
|
||||
public interface FindPublisherPreparer extends ReadPreferenceAware {
|
||||
|
||||
/**
|
||||
* Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}.
|
||||
*
|
||||
* @since 2.2
|
||||
*/
|
||||
FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher);
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
*
|
||||
* @param findPublisher must not be {@literal null}.
|
||||
*/
|
||||
<T> FindPublisher<T> prepare(FindPublisher<T> findPublisher);
|
||||
FindPublisher<Document> prepare(FindPublisher<Document> findPublisher);
|
||||
|
||||
/**
|
||||
* Apply query-specific settings to {@link MongoCollection} and initiate a find operation returning a
|
||||
* {@link FindPublisher} via the given {@link Function find} function.
|
||||
*
|
||||
* @param collection must not be {@literal null}.
|
||||
* @param find must not be {@literal null}.
|
||||
* @return the prepared {@link FindPublisher} produced by the given {@code find} function.
|
||||
* @throws IllegalArgumentException if one of the required arguments is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default FindPublisher<Document> initiateFind(MongoCollection<Document> collection,
|
||||
Function<MongoCollection<Document>, FindPublisher<Document>> find) {
|
||||
|
||||
Assert.notNull(collection, "Collection must not be null!");
|
||||
Assert.notNull(find, "Find function must not be null!");
|
||||
|
||||
if (hasReadPreference()) {
|
||||
collection = collection.withReadPreference(getReadPreference());
|
||||
}
|
||||
|
||||
return prepare(find.apply(collection));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link ReadPreference} to apply or {@literal null} if none defined.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Override
|
||||
@Nullable
|
||||
default ReadPreference getReadPreference() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
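initiateFind(...) first applies the preparer's ReadPreference (if any) to the collection and then hands the resulting FindPublisher to prepare(...). A hedged sketch of a custom preparer; the limit, read preference, and the collection variable are illustrative assumptions:

FindPublisherPreparer preparer = new FindPublisherPreparer() {

    @Override
    public FindPublisher<Document> prepare(FindPublisher<Document> findPublisher) {
        return findPublisher.limit(10); // query-specific tuning
    }

    @Override
    public ReadPreference getReadPreference() {
        return ReadPreference.secondaryPreferred(); // picked up by initiateFind(...)
    }
};

FindPublisher<Document> publisher = preparer.initiateFind(collection, MongoCollection::find);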
@@ -80,7 +80,11 @@ public class MappedDocument {
|
||||
}
|
||||
|
||||
public Bson getIdFilter() {
|
||||
return Filters.eq(ID_FIELD, document.get(ID_FIELD));
|
||||
return new Document(ID_FIELD, document.get(ID_FIELD));
|
||||
}
|
||||
|
||||
public Object get(String key) {
|
||||
return document.get(key);
|
||||
}
|
||||
|
||||
public UpdateDefinition updateWithoutId() {
|
||||
@@ -92,7 +96,7 @@ public class MappedDocument {
|
||||
* mapped to the specific domain type.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1.4
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappedUpdate implements UpdateDefinition {
|
||||
|
||||
@@ -137,5 +141,14 @@ public class MappedDocument {
|
||||
public Boolean isIsolated() {
|
||||
return delegate.isIsolated();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,210 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
* domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names}
|
||||
* and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public MongoJsonSchema createSchemaFor(Class<?> type) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty nested : entity) {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
break;
|
||||
}
|
||||
|
||||
currentPath.add(nested);
|
||||
schemaProperties.add(computeSchemaForProperty(currentPath));
|
||||
}
|
||||
|
||||
return schemaProperties;
|
||||
}
|
||||
|
||||
private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {
|
||||
|
||||
MongoPersistentProperty property = CollectionUtils.lastElement(path);
|
||||
|
||||
boolean required = isRequiredProperty(property);
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentProperty property, boolean required) {
|
||||
|
||||
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
|
||||
mappingContext.getRequiredPersistentEntity(property));
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) {
|
||||
return createSchemaProperty(fieldName, type, required, Collections.emptyList());
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
|
||||
return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
|
||||
: property.getName();
|
||||
}
|
||||
|
||||
private boolean isRequiredProperty(PersistentProperty property) {
|
||||
return property.getType().isPrimitive();
|
||||
}
|
||||
|
||||
private Class<?> computeTargetType(PersistentProperty<?> property) {
|
||||
|
||||
if (!(property instanceof MongoPersistentProperty)) {
|
||||
return property.getType();
|
||||
}
|
||||
|
||||
MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
|
||||
if (!mongoProperty.isIdProperty()) {
|
||||
return mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
if (mongoProperty.hasExplicitWriteTarget()) {
|
||||
return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass();
|
||||
}
|
||||
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
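The creator above walks the persistent entity and derives a $jsonSchema description from the mapped field names and conversion target types. A hedged usage sketch, assuming an initialized MongoConverter is available, that MongoJsonSchemaCreator exposes a create(MongoConverter) factory, and that Person is an illustrative domain class:

MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(converter);
MongoJsonSchema schema = schemaCreator.createSchemaFor(Person.class);
Document schemaDocument = schema.toDocument(); // e.g. to create a collection with validation rules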
@@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedOperation;
|
||||
import org.springframework.jmx.export.annotation.ManagedResource;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
@@ -36,10 +36,14 @@ public class MongoAdmin implements MongoAdminOperations {
|
||||
|
||||
private final MongoClient mongoClient;
|
||||
|
||||
public MongoAdmin(MongoClient mongoClient) {
|
||||
/**
|
||||
* @param client the underlying {@link com.mongodb.client.MongoClient} used for data access.
|
||||
* @since 2.2
|
||||
*/
|
||||
public MongoAdmin(MongoClient client) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
this.mongoClient = mongoClient;
|
||||
Assert.notNull(client, "Client must not be null!");
|
||||
this.mongoClient = client;
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
|
||||
@@ -16,66 +16,72 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.SpringDataMongoDB;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ConnectionString;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.client.MongoClient;
|
||||
import com.mongodb.client.MongoClients;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.event.ClusterListener;
|
||||
|
||||
/**
|
||||
* Convenient factory for configuring MongoDB.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
*/
|
||||
public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> implements PersistenceExceptionTranslator {
|
||||
|
||||
private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();
|
||||
|
||||
private @Nullable MongoClientOptions mongoClientOptions;
|
||||
private @Nullable MongoClientSettings mongoClientSettings;
|
||||
private @Nullable String host;
|
||||
private @Nullable Integer port;
|
||||
private List<ServerAddress> replicaSetSeeds = Collections.emptyList();
|
||||
private List<MongoCredential> credentials = Collections.emptyList();
|
||||
private @Nullable List<MongoCredential> credential = null;
|
||||
private @Nullable ConnectionString connectionString;
|
||||
private @Nullable String replicaSet = null;
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}.
|
||||
* Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param mongoClientOptions
|
||||
*/
|
||||
public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) {
|
||||
this.mongoClientOptions = mongoClientOptions;
|
||||
public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) {
|
||||
this.mongoClientSettings = mongoClientOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of credentials to be used when creating {@link MongoClient}.
|
||||
*
|
||||
* @param credentials can be {@literal null}.
|
||||
* @param credential can be {@literal null}.
|
||||
*/
|
||||
public void setCredentials(@Nullable MongoCredential[] credentials) {
|
||||
this.credentials = filterNonNullElementsAsList(credentials);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the list of {@link ServerAddress} to build up a replica set for.
|
||||
*
|
||||
* @param replicaSetSeeds can be {@literal null}.
|
||||
*/
|
||||
public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) {
|
||||
this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds);
|
||||
public void setCredential(@Nullable MongoCredential[] credential) {
|
||||
this.credential = Arrays.asList(credential);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -96,6 +102,14 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
public void setConnectionString(@Nullable ConnectionString connectionString) {
|
||||
this.connectionString = connectionString;
|
||||
}
|
||||
|
||||
public void setReplicaSet(@Nullable String replicaSet) {
|
||||
this.replicaSet = replicaSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to use.
|
||||
*
|
||||
@@ -128,12 +142,198 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
*/
|
||||
@Override
|
||||
protected MongoClient createInstance() throws Exception {
|
||||
return createMongoClient(computeClientSetting());
|
||||
}
|
||||
|
||||
if (mongoClientOptions == null) {
|
||||
mongoClientOptions = MongoClientOptions.builder().build();
|
||||
/**
|
||||
* Create {@link MongoClientSettings} based on configuration and priority (lower is better).
|
||||
* <ol>
|
||||
* <li>{@link MongoClientFactoryBean#mongoClientSettings}</li>
|
||||
* <li>{@link MongoClientFactoryBean#connectionString}</li>
|
||||
* <li>default {@link MongoClientSettings}</li>
|
||||
* </ol>
|
||||
*
|
||||
* @since 3.0
|
||||
*/
|
||||
protected MongoClientSettings computeClientSetting() {
|
||||
|
||||
if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
|
||||
throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
|
||||
}
|
||||
|
||||
return createMongoClient();
|
||||
ConnectionString connectionString = this.connectionString != null ? this.connectionString
|
||||
: new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()),
|
||||
getOrDefault(port, "" + ServerAddress.defaultPort())));
|
||||
|
||||
Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString);
|
||||
builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY);
|
||||
|
||||
if (mongoClientSettings != null) {
|
||||
|
||||
MongoClientSettings defaultSettings = MongoClientSettings.builder().build();
|
||||
|
||||
SslSettings sslSettings = mongoClientSettings.getSslSettings();
|
||||
ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings();
|
||||
ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings();
|
||||
SocketSettings socketSettings = mongoClientSettings.getSocketSettings();
|
||||
ServerSettings serverSettings = mongoClientSettings.getServerSettings();
|
||||
|
||||
builder = builder //
|
||||
.applicationName(computeSettingsValue(defaultSettings.getApplicationName(),
|
||||
mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) //
|
||||
.applyToSslSettings(settings -> {
|
||||
|
||||
applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled()));
|
||||
applySettings(settings::invalidHostNameAllowed, (computeSettingsValue(SslSettings::isInvalidHostNameAllowed,
|
||||
defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed())));
|
||||
settings.context(sslSettings.getContext());
|
||||
}).applyToClusterSettings(settings -> {
|
||||
|
||||
applySettings(settings::hosts,
|
||||
computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList())));
|
||||
|
||||
applySettings(settings::requiredReplicaSetName,
|
||||
computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(),
|
||||
clusterSettings, connectionString.getRequiredReplicaSetName()));
|
||||
|
||||
applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
|
||||
applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold()));
|
||||
|
||||
applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getClusterSettings(), clusterSettings,
|
||||
connectionString.getServerSelectionTimeout()));
|
||||
|
||||
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null));
|
||||
List<ClusterListener> clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners,
|
||||
defaultSettings.getClusterSettings(), clusterSettings, null);
|
||||
if (clusterListeners != null) {
|
||||
clusterListeners.forEach(settings::addClusterListener);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionIdleTime()));
|
||||
|
||||
applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxConnectionLifeTime()));
|
||||
|
||||
applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings,
|
||||
connectionString.getMaxWaitTime()));
|
||||
|
||||
applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue(
|
||||
(ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null));
|
||||
|
||||
applySettings(settings::minSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMinConnectionPoolSize()));
|
||||
applySettings(settings::maxSize,
|
||||
computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(),
|
||||
connectionPoolSettings, connectionString.getMaxConnectionPoolSize()));
|
||||
}) //
|
||||
.applyToSocketSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout()));
|
||||
|
||||
applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout()));
|
||||
applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize,
|
||||
defaultSettings.getSocketSettings(), socketSettings, null));
|
||||
}) //
|
||||
.applyToServerSettings(settings -> {
|
||||
|
||||
applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, null));
|
||||
|
||||
applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS),
|
||||
computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS),
|
||||
defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency()));
|
||||
settings.applySettings(serverSettings);
|
||||
}) //
|
||||
.autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) //
|
||||
.codecRegistry(mongoClientSettings.getCodecRegistry()); //
|
||||
|
||||
applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(),
|
||||
mongoClientSettings.getReadConcern(), connectionString.getReadConcern()));
|
||||
applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(),
|
||||
mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern()));
|
||||
applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(),
|
||||
mongoClientSettings.getReadPreference(), connectionString.getReadPreference()));
|
||||
applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(),
|
||||
mongoClientSettings.getRetryReads(), connectionString.getRetryReads()));
|
||||
applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(),
|
||||
mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue()));
|
||||
applySettings(builder::uuidRepresentation,
|
||||
computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY));
|
||||
}
|
||||
|
||||
if (!CollectionUtils.isEmpty(credential)) {
|
||||
builder = builder.credential(credential.iterator().next());
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(replicaSet)) {
|
||||
builder.applyToClusterSettings((settings) -> {
|
||||
settings.requiredReplicaSetName(replicaSet);
|
||||
});
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private <T> void applySettings(Consumer<T> settingsBuilder, @Nullable T value) {
|
||||
|
||||
if (ObjectUtils.isEmpty(value)) {
|
||||
return;
|
||||
}
|
||||
settingsBuilder.accept(value);
|
||||
}
|
||||
|
||||
private <S, T> T computeSettingsValue(Function<S, T> function, S defaultValueHolder, S settingsValueHolder,
|
||||
@Nullable T connectionStringValue) {
|
||||
return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder),
|
||||
connectionStringValue);
|
||||
}
|
||||
|
||||
private <T> T computeSettingsValue(T defaultValue, T fromSettings, T fromConnectionString) {
|
||||
|
||||
boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings);
|
||||
boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString);
|
||||
|
||||
if (!fromSettingsIsDefault) {
|
||||
return fromSettings;
|
||||
}
|
||||
return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -148,43 +348,11 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
|
||||
}
|
||||
}
|
||||
|
||||
private MongoClient createMongoClient() throws UnknownHostException {
|
||||
|
||||
if (!CollectionUtils.isEmpty(replicaSetSeeds)) {
|
||||
return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions);
|
||||
}
|
||||
|
||||
return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions);
|
||||
private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException {
|
||||
return MongoClients.create(settings, SpringDataMongoDB.driverInformation());
|
||||
}
|
||||
|
||||
private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException {
|
||||
|
||||
ServerAddress defaultAddress = new ServerAddress();
|
||||
|
||||
return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(),
|
||||
port != null ? port.intValue() : defaultAddress.getPort());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given array as {@link List} with all {@literal null} elements removed.
|
||||
*
|
||||
* @param elements the elements to filter, can be {@literal null}.
|
||||
* @return a new unmodifiable {@link List} built from the given elements without {@literal null}s.
|
||||
*/
|
||||
private static <T> List<T> filterNonNullElementsAsList(@Nullable T[] elements) {
|
||||
|
||||
if (elements == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<T> candidateElements = new ArrayList<T>();
|
||||
|
||||
for (T element : elements) {
|
||||
if (element != null) {
|
||||
candidateElements.add(element);
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.unmodifiableList(candidateElements);
|
||||
private String getOrDefault(Object value, String defaultValue) {
|
||||
return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
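computeClientSetting() above merges three sources, letting an explicitly supplied MongoClientSettings win over the ConnectionString, which in turn wins over the driver defaults; computeSettingsValue(...) implements that rule by returning the first value that differs from the default. A simplified, hedged restatement of the rule in isolation:

// Simplified restatement of the precedence applied by computeSettingsValue(...).
static <T> T pick(T defaultValue, T fromSettings, T fromConnectionString) {

    if (!ObjectUtils.nullSafeEquals(defaultValue, fromSettings)) {
        return fromSettings;                 // explicit MongoClientSettings value wins
    }

    return !ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString)
            ? fromConnectionString           // otherwise prefer the ConnectionString value
            : defaultValue;                  // finally fall back to the driver default
}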
@@ -1,314 +0,0 @@
|
||||
/*
|
||||
* Copyright 2015-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import javax.net.SocketFactory;
|
||||
import javax.net.ssl.SSLSocketFactory;
|
||||
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
import com.mongodb.MongoClientOptions;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientOptions} instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
*/
|
||||
public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClientOptions> {
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
.getThreadsAllowedToBlockForConnectionMultiplier();
|
||||
private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime();
|
||||
private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime();
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory();
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
private int heartbeatConnectTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout();
|
||||
private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout();
|
||||
private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName();
|
||||
private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout();
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minimum number of connections per host.
|
||||
*
|
||||
* @param minConnectionsPerHost
|
||||
*/
|
||||
public void setMinConnectionsPerHost(int minConnectionsPerHost) {
|
||||
this.minConnectionsPerHost = minConnectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of connections allowed per host. Callers block once the pool is exhausted. Default is 10. System property
|
||||
* {@code MONGO.POOLSIZE} can override this value.
|
||||
*
|
||||
* @param connectionsPerHost
|
||||
*/
|
||||
public void setConnectionsPerHost(int connectionsPerHost) {
|
||||
this.connectionsPerHost = connectionsPerHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the multiplier for connectionsPerHost for the number of threads allowed to block. Default is 5. If connectionsPerHost is
|
||||
* 10 and threadsAllowedToBlockForConnectionMultiplier is 5, then up to 50 threads can block; any more and an
|
||||
* exception will be thrown.
|
||||
*
|
||||
* @param threadsAllowedToBlockForConnectionMultiplier
|
||||
*/
|
||||
public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) {
|
||||
this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the max wait time of a blocking thread for a connection. Default is 120000 ms (2 minutes).
|
||||
*
|
||||
* @param maxWaitTime
|
||||
*/
|
||||
public void setMaxWaitTime(int maxWaitTime) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The maximum idle time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionIdleTime
|
||||
*/
|
||||
public void setMaxConnectionIdleTime(int maxConnectionIdleTime) {
|
||||
this.maxConnectionIdleTime = maxConnectionIdleTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum life time for a pooled connection.
|
||||
*
|
||||
* @param maxConnectionLifeTime
|
||||
*/
|
||||
public void setMaxConnectionLifeTime(int maxConnectionLifeTime) {
|
||||
this.maxConnectionLifeTime = maxConnectionLifeTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout in milliseconds. Defaults to {@literal 0}, meaning no timeout.
|
||||
*
|
||||
* @param connectTimeout
|
||||
*/
|
||||
public void setConnectTimeout(int connectTimeout) {
|
||||
this.connectTimeout = connectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout in milliseconds. Defaults to {@literal 0}, meaning no timeout.
|
||||
*
|
||||
* @param socketTimeout
|
||||
*/
|
||||
public void setSocketTimeout(int socketTimeout) {
|
||||
this.socketTimeout = socketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the keep-alive flag that controls whether socket keep-alive is enabled. Defaults to {@literal false}.
|
||||
*
|
||||
* @param socketKeepAlive
|
||||
*/
|
||||
public void setSocketKeepAlive(boolean socketKeepAlive) {
|
||||
this.socketKeepAlive = socketKeepAlive;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(@Nullable ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB
|
||||
* object.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(@Nullable WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketFactory
|
||||
*/
|
||||
public void setSocketFactory(@Nullable SocketFactory socketFactory) {
|
||||
this.socketFactory = socketFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the frequency that the driver will attempt to determine the current state of each server in the cluster.
|
||||
*
|
||||
* @param heartbeatFrequency
|
||||
*/
|
||||
public void setHeartbeatFrequency(int heartbeatFrequency) {
|
||||
this.heartbeatFrequency = heartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* In the event that the driver has to frequently re-check a server's availability, it will wait at least this long
|
||||
* since the previous check to avoid wasted effort.
|
||||
*
|
||||
* @param minHeartbeatFrequency
|
||||
*/
|
||||
public void setMinHeartbeatFrequency(int minHeartbeatFrequency) {
|
||||
this.minHeartbeatFrequency = minHeartbeatFrequency;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the connect timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatConnectTimeout
|
||||
*/
|
||||
public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) {
|
||||
this.heartbeatConnectTimeout = heartbeatConnectTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the socket timeout for connections used for the cluster heartbeat.
|
||||
*
|
||||
* @param heartbeatSocketTimeout
|
||||
*/
|
||||
public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) {
|
||||
this.heartbeatSocketTimeout = heartbeatSocketTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the name of the replica set.
|
||||
*
|
||||
* @param requiredReplicaSetName
|
||||
*/
|
||||
public void setRequiredReplicaSetName(String requiredReplicaSetName) {
|
||||
this.requiredReplicaSetName = requiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Controls whether the driver should use an SSL connection. Defaults to {@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
public void setSsl(boolean ssl) {
|
||||
this.ssl = ssl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here,
|
||||
* {@link SSLSocketFactory#getDefault()} will be used.
|
||||
*
|
||||
* @param sslSocketFactory
|
||||
*/
|
||||
public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) {
|
||||
|
||||
this.sslSocketFactory = sslSocketFactory;
|
||||
this.ssl = sslSocketFactory != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of
|
||||
* 30 sec will be used. A value of 0 means that it will timeout immediately if no server is available. A negative
|
||||
* value means to wait indefinitely.
|
||||
*
|
||||
* @param serverSelectionTimeout in msec.
|
||||
*/
|
||||
public void setServerSelectionTimeout(int serverSelectionTimeout) {
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
*/
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@Override
|
||||
protected MongoClientOptions createInstance() throws Exception {
|
||||
|
||||
SocketFactory socketFactoryToUse = ssl
|
||||
? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault())
|
||||
: this.socketFactory;
|
||||
|
||||
return MongoClientOptions.builder() //
|
||||
.alwaysUseMBeans(this.alwaysUseMBeans) //
|
||||
.connectionsPerHost(this.connectionsPerHost) //
|
||||
.connectTimeout(connectTimeout) //
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.description(description) //
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
.maxConnectionIdleTime(maxConnectionIdleTime) //
|
||||
.maxConnectionLifeTime(maxConnectionLifeTime) //
|
||||
.maxWaitTime(maxWaitTime) //
|
||||
.minConnectionsPerHost(minConnectionsPerHost) //
|
||||
.minHeartbeatFrequency(minHeartbeatFrequency) //
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.socketFactory(socketFactoryToUse) //
|
||||
.socketKeepAlive(socketKeepAlive) //
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientOptions.class;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,486 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.bson.UuidRepresentation;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
|
||||
/**
 * A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 3.0
 */
public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private @Nullable Boolean retryReads = null;
|
||||
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private @Nullable Boolean retryWrites = null;
|
||||
|
||||
private @Nullable String applicationName = null;
|
||||
|
||||
private @Nullable UuidRepresentation uUidRepresentation = null;
|
||||
|
||||
// --> Socket Settings
|
||||
|
||||
private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings()
|
||||
.getConnectTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS);
|
||||
private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize();
|
||||
private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize();
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost();
|
||||
private List<ServerAddress> clusterHosts = Collections.emptyList();
|
||||
private @Nullable ClusterConnectionMode clusterConnectionMode = null;
|
||||
private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType();
|
||||
private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getRequiredReplicaSetName();
|
||||
private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getLocalThreshold(TimeUnit.MILLISECONDS);
|
||||
private long clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings()
|
||||
.getServerSelectionTimeout(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize();
|
||||
private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize();
|
||||
private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxWaitTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS);
|
||||
private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings()
|
||||
.getMaintenanceFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled();
|
||||
private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed();
|
||||
private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled()
|
||||
? DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName()
|
||||
: "";
|
||||
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) {
|
||||
this.socketConnectTimeoutMS = socketConnectTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReadTimeoutMS in msec
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit)
|
||||
*/
|
||||
public void setSocketReadTimeoutMS(int socketReadTimeoutMS) {
|
||||
this.socketReadTimeoutMS = socketReadTimeoutMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketReceiveBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int)
|
||||
*/
|
||||
public void setSocketReceiveBufferSize(int socketReceiveBufferSize) {
|
||||
this.socketReceiveBufferSize = socketReceiveBufferSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param socketSendBufferSize
|
||||
* @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int)
|
||||
*/
|
||||
public void setSocketSendBufferSize(int socketSendBufferSize) {
|
||||
this.socketSendBufferSize = socketSendBufferSize;
|
||||
}
|
||||
|
||||
// --> Server Settings
|
||||
|
||||
private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings()
|
||||
.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS);
|
||||
|
||||
/**
|
||||
* @param serverHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) {
|
||||
this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverMinHeartbeatFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) {
|
||||
this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS;
|
||||
}
|
||||
|
||||
// --> Cluster Settings
|
||||
|
||||
/**
|
||||
* @param clusterSrvHost
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String)
|
||||
*/
|
||||
public void setClusterSrvHost(String clusterSrvHost) {
|
||||
this.clusterSrvHost = clusterSrvHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterHosts
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#hosts(List)
|
||||
*/
|
||||
public void setClusterHosts(ServerAddress[] clusterHosts) {
|
||||
this.clusterHosts = Arrays.asList(clusterHosts);
|
||||
}
|
||||
|
||||
/**
 * Set the {@link ClusterConnectionMode} to be used for the cluster.
 *
 * @param clusterConnectionMode
 * @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode)
 */
public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) {
this.clusterConnectionMode = clusterConnectionMode;
}
|
||||
|
||||
/**
|
||||
* @param custerRequiredClusterType
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType)
|
||||
*/
|
||||
public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) {
|
||||
this.custerRequiredClusterType = custerRequiredClusterType;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterRequiredReplicaSetName
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String)
|
||||
*/
|
||||
public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) {
|
||||
this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterLocalThresholdMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) {
|
||||
this.clusterLocalThresholdMS = clusterLocalThresholdMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param clusterServerSelectionTimeoutMS in msec
|
||||
* @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit)
|
||||
*/
|
||||
public void setClusterServerSelectionTimeoutMS(long clusterServerSelectionTimeoutMS) {
|
||||
this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS;
|
||||
}
|
||||
|
||||
// --> ConnectionPoolSettings
|
||||
|
||||
/**
|
||||
* @param poolMaxSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int)
|
||||
*/
|
||||
public void setPoolMaxSize(int poolMaxSize) {
|
||||
this.poolMaxSize = poolMaxSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMinSize
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int)
|
||||
*/
|
||||
public void setPoolMinSize(int poolMinSize) {
|
||||
this.poolMinSize = poolMinSize;
|
||||
}
|
||||
|
||||
/**
 * @param poolMaxWaitTimeMS in msec
 * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit)
 */
public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) {
this.poolMaxWaitTimeMS = poolMaxWaitTimeMS;
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionLifeTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) {
|
||||
this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaxConnectionIdleTimeMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) {
|
||||
this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceInitialDelayMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) {
|
||||
this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param poolMaintenanceFrequencyMS in msec
|
||||
* @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit)
|
||||
*/
|
||||
public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) {
|
||||
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS;
|
||||
}
|
||||
|
||||
// --> SSL Settings
|
||||
|
||||
/**
|
||||
* @param sslEnabled
|
||||
* @see com.mongodb.connection.SslSettings.Builder#enabled(boolean)
|
||||
*/
|
||||
public void setSslEnabled(Boolean sslEnabled) {
|
||||
this.sslEnabled = sslEnabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslInvalidHostNameAllowed
|
||||
* @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean)
|
||||
*/
|
||||
public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) {
|
||||
this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sslProvider
|
||||
* @see com.mongodb.connection.SslSettings.Builder#context(SSLContext)
|
||||
* @see SSLContext#getInstance(String)
|
||||
*/
|
||||
public void setSslProvider(String sslProvider) {
|
||||
this.sslProvider = sslProvider;
|
||||
}
|
||||
|
||||
// encryption and retry
|
||||
|
||||
/**
|
||||
* @param applicationName
|
||||
* @see MongoClientSettings.Builder#applicationName(String)
|
||||
*/
|
||||
public void setApplicationName(@Nullable String applicationName) {
|
||||
this.applicationName = applicationName;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryReads
|
||||
* @see MongoClientSettings.Builder#retryReads(boolean)
|
||||
*/
|
||||
public void setRetryReads(@Nullable Boolean retryReads) {
|
||||
this.retryReads = retryReads;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readConcern
|
||||
* @see MongoClientSettings.Builder#readConcern(ReadConcern)
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param writeConcern
|
||||
* @see MongoClientSettings.Builder#writeConcern(WriteConcern)
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param retryWrites
|
||||
* @see MongoClientSettings.Builder#retryWrites(boolean)
|
||||
*/
|
||||
public void setRetryWrites(@Nullable Boolean retryWrites) {
|
||||
this.retryWrites = retryWrites;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param readPreference
|
||||
* @see MongoClientSettings.Builder#readPreference(ReadPreference)
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param streamFactoryFactory
|
||||
* @see MongoClientSettings.Builder#streamFactoryFactory(StreamFactoryFactory)
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param codecRegistry
|
||||
* @see MongoClientSettings.Builder#codecRegistry(CodecRegistry)
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param uUidRepresentation
|
||||
*/
|
||||
public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) {
|
||||
this.uUidRepresentation = uUidRepresentation;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings)
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() {
|
||||
|
||||
Builder builder = MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.applicationName(applicationName) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.applyToClusterSettings((settings) -> {
|
||||
|
||||
settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
if (clusterConnectionMode != null) {
|
||||
settings.mode(clusterConnectionMode);
|
||||
}
|
||||
settings.requiredReplicaSetName(clusterRequiredReplicaSetName);
|
||||
|
||||
if (!CollectionUtils.isEmpty(clusterHosts)) {
|
||||
settings.hosts(clusterHosts);
|
||||
}
|
||||
settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(clusterMaxWaitQueueSize);
|
||||
settings.requiredClusterType(custerRequiredClusterType);
|
||||
|
||||
if (StringUtils.hasText(clusterSrvHost)) {
|
||||
settings.srvHost(clusterSrvHost);
|
||||
}
|
||||
}) //
|
||||
.applyToConnectionPoolSettings((settings) -> {
|
||||
|
||||
settings.minSize(poolMinSize);
|
||||
settings.maxSize(poolMaxSize);
|
||||
settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS);
|
||||
settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS);
|
||||
// settings.maxWaitQueueSize(poolMaxWaitQueueSize);
|
||||
settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToServerSettings((settings) -> {
|
||||
|
||||
settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS);
|
||||
}) //
|
||||
.applyToSocketSettings((settings) -> {
|
||||
|
||||
settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS);
|
||||
settings.receiveBufferSize(socketReceiveBufferSize);
|
||||
settings.sendBufferSize(socketSendBufferSize);
|
||||
}) //
|
||||
.applyToSslSettings((settings) -> {
|
||||
|
||||
settings.enabled(sslEnabled);
|
||||
if (sslEnabled) {
|
||||
|
||||
settings.invalidHostNameAllowed(sslInvalidHostNameAllowed);
|
||||
try {
|
||||
settings.context(
|
||||
StringUtils.hasText(sslProvider) ? SSLContext.getInstance(sslProvider) : SSLContext.getDefault());
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalArgumentException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (streamFactoryFactory != null) {
|
||||
builder = builder.streamFactoryFactory(streamFactoryFactory);
|
||||
}
|
||||
if (retryReads != null) {
|
||||
builder = builder.retryReads(retryReads);
|
||||
}
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
|
||||
if (uUidRepresentation != null) {
|
||||
builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
}
|
||||
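A minimal sketch of driving the new settings factory bean programmatically; the host, timeout value and wrapper class name are illustrative, and the container would normally inject these properties:

import java.util.concurrent.TimeUnit;

import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerAddress;

class MongoClientSettingsFactoryBeanExample {

	public static void main(String[] args) throws Exception {

		MongoClientSettingsFactoryBean factoryBean = new MongoClientSettingsFactoryBean();
		factoryBean.setClusterHosts(new ServerAddress[] { new ServerAddress("localhost", 27017) });
		factoryBean.setClusterServerSelectionTimeoutMS(5_000); // example value
		factoryBean.setRetryWrites(true);

		factoryBean.afterPropertiesSet(); // builds the MongoClientSettings via createInstance()
		MongoClientSettings settings = factoryBean.getObject();
		System.out.println(settings.getClusterSettings().getServerSelectionTimeout(TimeUnit.MILLISECONDS));
	}
}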
@@ -16,10 +16,9 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteResult;
|
||||
import com.mongodb.WriteConcernResult;
|
||||
|
||||
/**
|
||||
* Mongo-specific {@link DataIntegrityViolationException}.
|
||||
@@ -30,18 +29,18 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
|
||||
private static final long serialVersionUID = -186980521176764046L;
|
||||
|
||||
private final WriteResult writeResult;
|
||||
private final WriteConcernResult writeResult;
|
||||
private final MongoActionOperation actionOperation;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}.
|
||||
* Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}.
|
||||
*
|
||||
* @param message the exception message
|
||||
* @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}.
|
||||
* @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}.
|
||||
* @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}.
|
||||
*/
|
||||
public MongoDataIntegrityViolationException(String message, WriteResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult,
|
||||
MongoActionOperation actionOperation) {
|
||||
|
||||
super(message);
|
||||
|
||||
@@ -53,11 +52,11 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link WriteResult} that caused the exception.
|
||||
* Returns the {@link WriteConcernResult} that caused the exception.
|
||||
*
|
||||
* @return the writeResult
|
||||
*/
|
||||
public WriteResult getWriteResult() {
|
||||
public WriteConcernResult getWriteResult() {
|
||||
return writeResult;
|
||||
}
|
||||
|
||||
|
||||
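For illustration, a hedged sketch of constructing the re-typed exception; the acknowledged result and the INSERT action are example values only:

import com.mongodb.WriteConcernResult;
import org.springframework.data.mongodb.core.MongoActionOperation;
import org.springframework.data.mongodb.core.MongoDataIntegrityViolationException;

class MongoDataIntegrityViolationExceptionExample {

	public static void main(String[] args) {

		// an acknowledged result for a single affected document, no upsert
		WriteConcernResult result = WriteConcernResult.acknowledged(1, false, null);

		MongoDataIntegrityViolationException exception = new MongoDataIntegrityViolationException(
				"Duplicate key on insert", result, MongoActionOperation.INSERT);

		System.out.println(exception.getWriteResult().getCount());
	}
}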
@@ -0,0 +1,260 @@
|
||||
/*
|
||||
* Copyright 2018-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 3.0
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
*/
|
||||
public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
* {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return getMongoDatabase(getDefaultDatabaseName());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDatabaseFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
}
|
||||
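A hedged sketch of a minimal concrete subclass supplying the two abstract hooks plus session creation against the com.mongodb.client.MongoClient API; the class name is illustrative, SimpleMongoClientDatabaseFactory is the implementation that actually ships:

import org.springframework.data.mongodb.core.MongoDatabaseFactorySupport;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoDatabase;

class ClientBackedDatabaseFactory extends MongoDatabaseFactorySupport<MongoClient> {

	ClientBackedDatabaseFactory(MongoClient client, String databaseName) {
		// 'true' hands client lifecycle management (closing it on destroy()) to this factory
		super(client, databaseName, true, new MongoExceptionTranslator());
	}

	@Override
	protected MongoDatabase doGetMongoDatabase(String dbName) {
		return getMongoClient().getDatabase(dbName);
	}

	@Override
	public ClientSession getSession(ClientSessionOptions options) {
		return getMongoClient().startSession(options);
	}

	@Override
	protected void closeClient() {
		getMongoClient().close();
	}
}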
@@ -15,26 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.DB;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient}
|
||||
* defining common properties such as database name and exception translator.
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
@@ -42,22 +27,16 @@ import com.mongodb.client.MongoDatabase;
|
||||
* @author Mark Paluch
|
||||
* @param <C> Client type.
|
||||
* @since 2.1
|
||||
* @see SimpleMongoDbFactory
|
||||
* @see SimpleMongoClientDbFactory
|
||||
* @see SimpleMongoClientDatabaseFactory
|
||||
* @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
|
||||
*/
|
||||
public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
|
||||
private final C mongoClient;
|
||||
private final String databaseName;
|
||||
private final boolean mongoInstanceCreated;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private @Nullable WriteConcern writeConcern;
|
||||
@Deprecated
|
||||
public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
|
||||
* {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
|
||||
*
|
||||
*
|
||||
* @param mongoClient must not be {@literal null}.
|
||||
* @param databaseName must not be {@literal null} or empty.
|
||||
* @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
|
||||
@@ -66,207 +45,6 @@ public abstract class MongoDbFactorySupport<C> implements MongoDbFactory {
|
||||
*/
|
||||
protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(mongoClient, "MongoClient must not be null!");
|
||||
Assert.hasText(databaseName, "Database name must not be empty!");
|
||||
Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
|
||||
"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
|
||||
|
||||
this.mongoClient = mongoClient;
|
||||
this.databaseName = databaseName;
|
||||
this.mongoInstanceCreated = mongoInstanceCreated;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created.
|
||||
*
|
||||
* @param writeConcern the writeConcern to set
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return getDb(databaseName);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
|
||||
Assert.hasText(dbName, "Database name must not be empty!");
|
||||
|
||||
MongoDatabase db = doGetMongoDatabase(dbName);
|
||||
|
||||
if (writeConcern == null) {
|
||||
return db;
|
||||
}
|
||||
|
||||
return db.withWriteConcern(writeConcern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link MongoDatabase} from the client.
|
||||
*
|
||||
* @param dbName must not be {@literal null} or empty.
|
||||
* @return
|
||||
*/
|
||||
protected abstract MongoDatabase doGetMongoDatabase(String dbName);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.DisposableBean#destroy()
|
||||
*/
|
||||
public void destroy() throws Exception {
|
||||
if (mongoInstanceCreated) {
|
||||
closeClient();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return this.exceptionTranslator;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
|
||||
*/
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the client instance.
|
||||
*/
|
||||
protected abstract void closeClient();
|
||||
|
||||
/**
|
||||
* @return the Mongo client object.
|
||||
*/
|
||||
protected C getMongoClient() {
|
||||
return mongoClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the database name.
|
||||
*/
|
||||
protected String getDefaultDatabaseName() {
|
||||
return databaseName;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a
|
||||
* {@link SessionAwareMethodInterceptor}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDbFactory {
|
||||
|
||||
ClientSession session;
|
||||
MongoDbFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb() throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabase getDb(String dbName) throws DataAccessException {
|
||||
return proxyMongoDatabase(delegate.getDb(dbName));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
|
||||
*/
|
||||
@Override
|
||||
public PersistenceExceptionTranslator getExceptionTranslator() {
|
||||
return delegate.getExceptionTranslator();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb()
|
||||
*/
|
||||
@Override
|
||||
public DB getLegacyDb() {
|
||||
return delegate.getLegacyDb();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
|
||||
*/
|
||||
@Override
|
||||
public ClientSession getSession(ClientSessionOptions options) {
|
||||
return delegate.getSession(options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
|
||||
*/
|
||||
@Override
|
||||
public MongoDbFactory withSession(ClientSession session) {
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase proxyMongoDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
private MongoCollection<?> proxyCollection(com.mongodb.session.ClientSession session,
|
||||
MongoCollection<?> collection) {
|
||||
return createProxyInstance(session, collection, MongoCollection.class);
|
||||
}
|
||||
|
||||
private <T> T createProxyInstance(com.mongodb.session.ClientSession session, T target, Class<T> targetType) {
|
||||
|
||||
ProxyFactory factory = new ProxyFactory();
|
||||
factory.setTarget(target);
|
||||
factory.setInterfaces(targetType);
|
||||
factory.setOpaque(true);
|
||||
|
||||
factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class,
|
||||
this::proxyDatabase, MongoCollection.class, this::proxyCollection));
|
||||
|
||||
return targetType.cast(factory.getProxy());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncryptionSettings> {
|
||||
|
||||
private boolean bypassAutoEncryption;
|
||||
private String keyVaultNamespace;
|
||||
private Map<String, Object> extraOptions;
|
||||
private MongoClientSettings keyVaultClientSettings;
|
||||
private Map<String, Map<String, Object>> kmsProviders;
|
||||
private Map<String, BsonDocument> schemaMap;
|
||||
|
||||
/**
|
||||
* @param bypassAutoEncryption
|
||||
* @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean)
|
||||
*/
|
||||
public void setBypassAutoEncryption(boolean bypassAutoEncryption) {
|
||||
this.bypassAutoEncryption = bypassAutoEncryption;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param extraOptions
|
||||
* @see AutoEncryptionSettings.Builder#extraOptions(Map)
|
||||
*/
|
||||
public void setExtraOptions(Map<String, Object> extraOptions) {
|
||||
this.extraOptions = extraOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultNamespace
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultNamespace(String)
|
||||
*/
|
||||
public void setKeyVaultNamespace(String keyVaultNamespace) {
|
||||
this.keyVaultNamespace = keyVaultNamespace;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultClientSettings
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings)
|
||||
*/
|
||||
public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) {
|
||||
this.keyVaultClientSettings = keyVaultClientSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param kmsProviders
|
||||
* @see AutoEncryptionSettings.Builder#kmsProviders(Map)
|
||||
*/
|
||||
public void setKmsProviders(Map<String, Map<String, Object>> kmsProviders) {
|
||||
this.kmsProviders = kmsProviders;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param schemaMap
|
||||
* @see AutoEncryptionSettings.Builder#schemaMap(Map)
|
||||
*/
|
||||
public void setSchemaMap(Map<String, BsonDocument> schemaMap) {
|
||||
this.schemaMap = schemaMap;
|
||||
}
|
||||
|
||||
/*
 * (non-Javadoc)
 * @see org.springframework.beans.factory.FactoryBean#getObject()
 */
@Override
public AutoEncryptionSettings getObject() {

return AutoEncryptionSettings.builder() //
.bypassAutoEncryption(bypassAutoEncryption) //
.keyVaultNamespace(keyVaultNamespace) //
.keyVaultMongoClientSettings(keyVaultClientSettings) //
.kmsProviders(orEmpty(kmsProviders)) //
.extraOptions(orEmpty(extraOptions)) //
.schemaMap(orEmpty(schemaMap)) //
.build();
}

private <K, V> Map<K, V> orEmpty(@Nullable Map<K, V> source) {
return source != null ? source : Collections.emptyMap();
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return AutoEncryptionSettings.class;
|
||||
}
|
||||
}
|
||||
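A hedged usage sketch for the encryption settings factory bean; the key-vault namespace and the all-zero 'local' master key are placeholders, not working key material:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.springframework.data.mongodb.core.MongoEncryptionSettingsFactoryBean;

import com.mongodb.AutoEncryptionSettings;

class MongoEncryptionSettingsFactoryBeanExample {

	public static void main(String[] args) {

		byte[] localMasterKey = new byte[96]; // placeholder key material, all zeros

		Map<String, Object> localKms = new HashMap<>();
		localKms.put("key", localMasterKey);

		MongoEncryptionSettingsFactoryBean factoryBean = new MongoEncryptionSettingsFactoryBean();
		factoryBean.setKeyVaultNamespace("encryption.__keyVault"); // example namespace
		factoryBean.setKmsProviders(Collections.singletonMap("local", localKms));
		factoryBean.setBypassAutoEncryption(true); // only explicit encryption in this sketch

		AutoEncryptionSettings settings = factoryBean.getObject();
		System.out.println(settings.getKeyVaultNamespace());
	}
}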
@@ -29,7 +29,6 @@ import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.InvalidDataAccessResourceUsageException;
|
||||
import org.springframework.dao.PermissionDeniedDataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.MongoTransactionException;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
@@ -37,7 +36,6 @@ import org.springframework.data.mongodb.util.MongoDbErrorCodes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.MongoServerException;
|
||||
@@ -112,10 +110,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
return new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof BulkWriteException) {
|
||||
return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex);
|
||||
}
|
||||
|
||||
// All other MongoExceptions
|
||||
if (ex instanceof MongoException) {
|
||||
|
||||
@@ -135,6 +129,7 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
|
||||
return new MongoTransactionException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
return new UncategorizedMongoDbException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
|
||||
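The translation rules touched by this hunk can be exercised directly; a hedged sketch, using error code 11000 as the classic duplicate-key case:

import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

import com.mongodb.MongoException;

class MongoExceptionTranslatorExample {

	public static void main(String[] args) {

		MongoExceptionTranslator translator = new MongoExceptionTranslator();

		// 11000 is MongoDB's duplicate key error code and is translated to DuplicateKeyException
		DataAccessException translated = translator
				.translateExceptionIfPossible(new MongoException(11000, "E11000 duplicate key error"));
		System.out.println(translated.getClass().getSimpleName());
	}
}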
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
* <strong>Ignored Properties</strong>
|
||||
* <ul>
|
||||
* <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
|
||||
* </ul>
|
||||
* <strong>Property Type Mapping</strong>
|
||||
* <ul>
|
||||
* <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Map} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
|
||||
* <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
|
||||
* <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
|
||||
* </ul>
|
||||
* <br />
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface MongoJsonSchemaCreator {
|
||||
|
||||
/**
 * Create the {@link MongoJsonSchema} for the given {@link Class type}.
 *
 * @param type must not be {@literal null}.
 * @return never {@literal null}.
 */
MongoJsonSchema createSchemaFor(Class<?> type);

/**
 * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
 * {@link MongoConverter}.
 *
 * @param mongoConverter must not be {@literal null}.
 * @return new instance of {@link MongoJsonSchemaCreator}.
 */
static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {

Assert.notNull(mongoConverter, "MongoConverter must not be null!");
return new MappingMongoJsonSchemaCreator(mongoConverter);
}
}
|
||||
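A short, hedged sketch of how the creator is typically used; the Person type is illustrative and the converter is assumed to be the MappingMongoConverter already configured for the template:

import org.bson.Document;

import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class MongoJsonSchemaCreatorExample {

	static class Person { // illustrative domain type
		String name;
		int age;
	}

	// derives the $jsonSchema document for Person using the given, pre-configured converter
	static Document schemaFor(MongoConverter converter) {

		MongoJsonSchema schema = MongoJsonSchemaCreator.create(converter).createSchemaFor(Person.class);
		return schema.toDocument();
	}
}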
@@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -40,13 +41,13 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.Cursor;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.ClientSession;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -57,7 +58,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <p />
|
||||
* <p/>
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -175,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <p />
|
||||
* <p/>
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
@@ -211,7 +212,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <p />
|
||||
* <p/>
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -222,9 +223,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
|
||||
* {@link Cursor}.
|
||||
* {@link com.mongodb.client.FindIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -237,9 +239,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
|
||||
* by a Mongo DB {@link Cursor}.
|
||||
* by a Mongo DB {@link com.mongodb.client.FindIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
|
||||
* be closed.
|
||||
*
|
||||
* @param query the query class that specifies the criteria used to find a record and also an optional fields
|
||||
* specification. Must not be {@literal null}.
|
||||
@@ -358,11 +361,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
IndexOperations indexOps(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level.
|
||||
* Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level.
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
@@ -434,7 +439,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
@@ -449,7 +458,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
|
||||
Class<T> entityClass);
|
||||
|
||||
@@ -505,11 +519,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> AggregationResults<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of
|
||||
* the aggregation.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* <p>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -524,11 +538,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is
|
||||
* derived from the inputType of the aggregation.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
|
||||
* of the inputCollection is derived from the inputType of the aggregation.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -542,10 +556,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -561,10 +575,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link Cursor}.
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw
|
||||
* results will be mapped to the given entity class.
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
@@ -637,24 +651,52 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* information to determine the collection the query is run against. Note that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses the
* {@code $geoNear} aggregation stage to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses the
* {@code $geoNear} aggregation stage to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected. Must not be {@literal null} nor empty.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -844,12 +886,15 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
@@ -857,13 +902,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated before it was updated or {@literal null}, if not found.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
@@ -872,15 +920,18 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
@@ -889,16 +940,19 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated or {@literal null}, if not found. Depending on the value of
|
||||
* {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as
|
||||
* it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
@Nullable
|
||||
<T> T findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
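Both Update and the new AggregationUpdate implement UpdateDefinition, so either can be handed to these overloads. A
minimal usage sketch, assuming a MongoTemplate named "template" and a hypothetical Person entity with a "status" field:

    Query query = Query.query(Criteria.where("firstname").is("luke"));
    Update update = new Update().set("status", "ACTIVE");

    // returns the modified document because returnNew(true) is set
    Person modified = template.findAndModify(query, update,
        FindAndModifyOptions.options().returnNew(true), Person.class);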
|
||||
/**
|
||||
@@ -1101,6 +1155,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1112,7 +1171,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1123,7 +1186,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}.
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
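As the note above explains, skip and limit set on the Query are passed on to the server and cap the count. A sketch of
the difference, assuming a MongoTemplate named "template" and a hypothetical Person collection:

    Query adults = Query.query(Criteria.where("age").gte(18));

    long capped = template.count(adults.limit(10), Person.class);                          // at most 10
    long total = template.count(Query.query(Criteria.where("age").gte(18)), Person.class); // all matches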
@@ -1141,8 +1208,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1203,8 +1270,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1234,99 +1301,111 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass);
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, Update update, String collectionName);
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* combining the query document and the update document.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult upsert(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
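A sketch of an upsert through the UpdateDefinition-based overload, assuming a MongoTemplate named "template" and a
hypothetical Person entity identified by an "email" field:

    Query query = Query.query(Criteria.where("email").is("luke@example.org"));
    Update update = new Update().set("lastLogin", Instant.now()).setOnInsert("createdAt", Instant.now());

    UpdateResult result = template.upsert(query, update, Person.class);
    boolean insertedNewDocument = result.getUpsertedId() != null;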
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
* the provided update document.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, Update update, String collectionName);
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1334,27 +1413,34 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass);
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the specified collection that matches the query document criteria with the
|
||||
* provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
|
||||
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, Update update, String collectionName);
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates all objects that are found in the collection for the entity class that matches the query document criteria
|
||||
@@ -1362,16 +1448,22 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
UpdateResult updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
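The multi-document variants accept the same UpdateDefinition types. A sketch, assuming a MongoTemplate named
"template" and a hypothetical Order entity:

    Query open = Query.query(Criteria.where("status").is("OPEN"));
    Update update = new Update().inc("retries", 1).currentDate("lastChecked");

    UpdateResult result = template.updateMulti(open, update, Order.class);
    long touched = result.getModifiedCount();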
|
||||
/**
|
||||
* Remove the given object from the collection by id.
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
|
||||
@@ -1379,7 +1471,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
DeleteResult remove(Object object);
|
||||
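Because the remove now also matches a present @Version property, deleting a stale instance removes nothing. A sketch,
assuming a MongoTemplate named "template" and a hypothetical versioned Person entity:

    Person loaded = template.findById(id, Person.class);
    // ... another process updates the document and bumps its version ...

    DeleteResult result = template.remove(loaded);
    boolean removed = result.getDeletedCount() == 1; // false here: the version held by "loaded" no longer matches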
|
||||
/**
|
||||
* Removes the given object from the given collection.
|
||||
* Removes the given object from the given collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param collectionName name of the collection where the objects will be removed, must not be {@literal null} or empty.
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,737 @@
|
||||
/*
|
||||
* Copyright 2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
|
||||
/**
* {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed.
* This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options
* for {@literal count}, {@literal remove}, and other methods.
*
* @author Christoph Strobl
* @author Mark Paluch
* @since 3.0
*/
class QueryOperations {
|
||||
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final EntityOperations entityOperations;
|
||||
private final CodecRegistryProvider codecRegistryProvider;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final AggregationUtil aggregationUtil;
|
||||
private final Map<Class<?>, Document> mappedShardKey = new ConcurrentHashMap<>(1);
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link QueryOperations}.
|
||||
*
|
||||
* @param queryMapper must not be {@literal null}.
|
||||
* @param updateMapper must not be {@literal null}.
|
||||
* @param entityOperations must not be {@literal null}.
|
||||
* @param codecRegistryProvider must not be {@literal null}.
|
||||
*/
|
||||
QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations,
|
||||
CodecRegistryProvider codecRegistryProvider) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.entityOperations = entityOperations;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.mappingContext = queryMapper.getMappingContext();
|
||||
this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
QueryContext createQueryContext(Query query) {
|
||||
return new QueryContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link DistinctQueryContext}.
|
||||
*/
|
||||
DistinctQueryContext distinctQueryContext(Query query, String fieldName) {
|
||||
return new DistinctQueryContext(query, fieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link CountContext}.
|
||||
*/
|
||||
CountContext countQueryContext(Query query) {
|
||||
return new CountContext(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting multiple documents.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, true, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance affecting a single document.
|
||||
*
|
||||
* @param updateDefinition must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) {
|
||||
return new UpdateContext(updateDefinition, query, false, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param replacement the {@link MappedDocument mapped replacement} document.
|
||||
* @param upsert use {@literal true} to insert diff when no existing document found.
|
||||
* @return new instance of {@link UpdateContext}.
|
||||
*/
|
||||
UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) {
|
||||
return new UpdateContext(replacement, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing all matching documents.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteQueryContext(Query query) {
|
||||
return new DeleteContext(query, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance removing only the first matching document.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @return new instance of {@link QueryContext}.
|
||||
*/
|
||||
DeleteContext deleteSingleContext(Query query) {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
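The factory methods above hand out small context objects that bundle query mapping and the corresponding driver
options. A rough sketch of the intended call pattern inside the template; the "queryOperations", "entity",
"entityClass" and "collection" names are illustrative only:

    CountContext countContext = queryOperations.countQueryContext(query);
    Document mappedQuery = countContext.getMappedQuery(entity);
    CountOptions options = countContext.getCountOptions(entityClass);
    long count = collection.countDocuments(mappedQuery, options);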
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class QueryContext {
|
||||
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create a new {@link QueryContext} instance from the given {@literal query} (which can be either a {@link Query} or a
* plain {@link Document}).
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
private QueryContext(@Nullable Query query) {
|
||||
this.query = query != null ? query : new Query();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Query getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document getQueryObject() {
|
||||
return query.getQueryObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the
|
||||
* given {@literal domainType}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable Class<T> domainType,
|
||||
Function<Class<T>, MongoPersistentEntity<?>> entityLookup) {
|
||||
return getMappedQuery(domainType == null ? null : entityLookup.apply(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped MongoDB query representation.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> entity) {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getFieldsObject() fields projection}
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(query.getFieldsObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped {@link Query#getSortObject() sort} option.
|
||||
*
|
||||
* @param entity the Entity to map field names to. Can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedSort(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedSort(query.getSortObject(), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply the {@link com.mongodb.client.model.Collation} if present extracted from the {@link Query} or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param consumer must not be {@literal null}.
|
||||
*/
|
||||
void applyCollation(@Nullable Class<?> domainType, Consumer<com.mongodb.client.model.Collation> consumer) {
|
||||
getCollation(domainType).ifPresent(consumer::accept);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present or fall back to
|
||||
* the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation()
|
||||
* collation}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Optional<com.mongodb.client.model.Collation> getCollation(@Nullable Class<?> domainType) {
|
||||
|
||||
return entityOperations.forType(domainType).getCollation(query) //
|
||||
.map(Collation::toMongoCollation);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DistinctQueryContext extends QueryContext {
|
||||
|
||||
private final String fieldName;
|
||||
|
||||
/**
|
||||
* Create a new {@link DistinctQueryContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param fieldName must not be {@literal null}.
|
||||
*/
|
||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||
|
||||
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return queryMapper.getMappedFields(new Document(fieldName, 1), entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapped field name to project to.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getMappedFieldName(@Nullable MongoPersistentEntity<?> entity) {
|
||||
return getMappedFields(entity).keySet().iterator().next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the MongoDB native representation of the given {@literal type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
<T> Class<T> getDriverCompatibleClass(Class<T> type) {
|
||||
|
||||
return codecRegistryProvider.getCodecFor(type) //
|
||||
.map(Codec::getEncoderClass) //
|
||||
.orElse((Class<T>) BsonValue.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the most specific read target type based on the user's {@literal requestedTargetType} and the property type
|
||||
* based on meta information extracted from the {@literal domainType}.
|
||||
*
|
||||
* @param requestedTargetType must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Class<?> getMostSpecificConversionTargetType(Class<?> requestedTargetType, Class<?> domainType) {
|
||||
|
||||
Class<?> conversionTargetType = requestedTargetType;
|
||||
try {
|
||||
|
||||
Class<?> propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType();
|
||||
|
||||
// use the more specific type but favor UserType over property one
|
||||
if (ClassUtils.isAssignable(requestedTargetType, propertyType)) {
|
||||
conversionTargetType = propertyType;
|
||||
}
|
||||
} catch (PropertyReferenceException e) {
|
||||
// just don't care about it as we default to Object.class anyway.
|
||||
}
|
||||
|
||||
return conversionTargetType;
|
||||
}
|
||||
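In practice this decides the element type requested from the driver's distinct execution. A sketch, where
"distinctQueryContext" and a Person entity with a java.util.Date "birthDate" property are illustrative names for a
context created for that field:

    // Object.class was requested, but the property type is more specific, so Date wins
    Class<?> target = distinctQueryContext.getMostSpecificConversionTargetType(Object.class, Person.class);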
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class CountContext extends QueryContext {
|
||||
|
||||
/**
|
||||
* Creates a new {@link CountContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
*/
|
||||
CountContext(@Nullable Query query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType) {
|
||||
return getCountOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CountOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
CountOptions getCountOptions(@Nullable Class<?> domainType, @Nullable Consumer<CountOptions> callback) {
|
||||
|
||||
CountOptions options = new CountOptions();
|
||||
Query query = getQuery();
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (query.getLimit() > 0) {
|
||||
options.limit(query.getLimit());
|
||||
}
|
||||
if (query.getSkip() > 0) {
|
||||
options.skip((int) query.getSkip());
|
||||
}
|
||||
if (StringUtils.hasText(query.getHint())) {
|
||||
|
||||
String hint = query.getHint();
|
||||
if (BsonUtils.isJsonDocument(hint)) {
|
||||
options.hint(BsonUtils.parse(hint, codecRegistryProvider));
|
||||
} else {
|
||||
options.hintString(hint);
|
||||
}
|
||||
}
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
class DeleteContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
|
||||
/**
|
||||
* Create a new {@link DeleteContext} instance.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one.
|
||||
*/
|
||||
DeleteContext(@Nullable Query query, boolean multi) {
|
||||
|
||||
super(query);
|
||||
this.multi = multi;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType) {
|
||||
return getDeleteOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link DeleteOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
DeleteOptions getDeleteOptions(@Nullable Class<?> domainType, @Nullable Consumer<DeleteOptions> callback) {
|
||||
|
||||
DeleteOptions options = new DeleteOptions();
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents shall be deleted.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}.
|
||||
*/
|
||||
class UpdateContext extends QueryContext {
|
||||
|
||||
private final boolean multi;
|
||||
private final boolean upsert;
|
||||
private final @Nullable UpdateDefinition update;
|
||||
private final @Nullable MappedDocument mappedDocument;
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query must not be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) {
|
||||
this(update, new BasicQuery(query), multi, upsert);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link UpdateContext} instance.
|
||||
*
|
||||
* @param update must not be {@literal null}.
|
||||
* @param query can be {@literal null}.
|
||||
* @param multi use {@literal true} to update all matching documents.
|
||||
* @param upsert use {@literal true} to insert a new document if none match.
|
||||
*/
|
||||
UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) {
|
||||
|
||||
super(query);
|
||||
|
||||
this.multi = multi;
|
||||
this.upsert = upsert;
|
||||
this.update = update;
|
||||
this.mappedDocument = null;
|
||||
}
|
||||
|
||||
UpdateContext(MappedDocument update, boolean upsert) {
|
||||
|
||||
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
|
||||
this.multi = false;
|
||||
this.upsert = upsert;
|
||||
this.mappedDocument = update;
|
||||
this.update = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType) {
|
||||
return getUpdateOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link UpdateOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
UpdateOptions getUpdateOptions(@Nullable Class<?> domainType, @Nullable Consumer<UpdateOptions> callback) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update != null && update.hasArrayFilters()) {
|
||||
options
|
||||
.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
applyCollation(domainType, options::collation);
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
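Array filters declared on an Update surface here as driver-level UpdateOptions. For illustration, an update that
would carry such a filter (field names are hypothetical):

    Update update = new Update().set("grades.$[element]", 100)
        .filterArray(Criteria.where("element").gte(100));
    // getUpdateOptions(..) then copies the filter into UpdateOptions#arrayFilters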
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType) {
|
||||
return getReplaceOptions(domainType, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ReplaceOptions} applicable for the {@link Query}.
|
||||
*
|
||||
* @param domainType can be {@literal null}.
|
||||
* @param callback a callback to modify the generated options. Can be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
ReplaceOptions getReplaceOptions(@Nullable Class<?> domainType, @Nullable Consumer<ReplaceOptions> callback) {
|
||||
|
||||
UpdateOptions updateOptions = getUpdateOptions(domainType);
|
||||
|
||||
ReplaceOptions options = new ReplaceOptions();
|
||||
options.collation(updateOptions.getCollation());
|
||||
options.upsert(updateOptions.isUpsert());
|
||||
|
||||
if (callback != null) {
|
||||
callback.accept(options);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@Override
|
||||
<T> Document getMappedQuery(@Nullable MongoPersistentEntity<T> domainType) {
|
||||
|
||||
Document mappedQuery = super.getMappedQuery(domainType);
|
||||
|
||||
if (multi && update.isIsolated() && !mappedQuery.containsKey("$isolated")) {
|
||||
mappedQuery.put("$isolated", 1);
|
||||
}
|
||||
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
<T> Document applyShardKey(MongoPersistentEntity<T> domainType, Document filter, @Nullable Document existing) {
|
||||
|
||||
Document shardKeySource = existing != null ? existing
|
||||
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
|
||||
|
||||
Document filterWithShardKey = new Document(filter);
|
||||
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
|
||||
|
||||
return filterWithShardKey;
|
||||
}
|
||||
|
||||
boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity<?> domainType) {
|
||||
|
||||
return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType)
|
||||
&& !filter.keySet().containsAll(getMappedShardKeyFields(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entities
|
||||
* {@literal id} property.
|
||||
* @since 3.0
|
||||
*/
|
||||
private boolean shardedById(MongoPersistentEntity<?> domainType) {
|
||||
|
||||
ShardKey shardKey = domainType.getShardKey();
|
||||
if (shardKey.size() != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String key = shardKey.getPropertyNames().iterator().next();
|
||||
if ("_id".equals(key)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentProperty idProperty = domainType.getIdProperty();
|
||||
return idProperty != null && idProperty.getName().equals(key);
|
||||
}
|
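To illustrate why the shard key is merged into the filter: single-document replacements against a sharded collection must target the full shard key. A hedged sketch of an entity whose shard key is not its id; the type and key names are made-up assumptions:

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.Sharded;

@Document("customers")
@Sharded(shardKey = { "country", "userId" }) // not sharded by _id, so requiresShardKey(..) applies
class Customer {

	@Id String id;
	String country;
	String userId;
	String name;
}

// A save/replace of a Customer then runs against a filter that contains
// { "_id": .., "country": .., "userId": .. }, assembled by applyShardKey(..).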
||||
|
||||
Set<String> getMappedShardKeyFields(MongoPersistentEntity<?> entity) {
|
||||
return getMappedShardKey(entity).keySet();
|
||||
}
|
||||
|
||||
Document getMappedShardKey(MongoPersistentEntity<?> entity) {
|
||||
return mappedShardKey.computeIfAbsent(entity.getType(),
|
||||
key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped aggregation pipeline to use when {@link #isAggregationUpdate()} returns {@literal true}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {
|
||||
|
||||
AggregationOperationContext context = domainType != null
|
||||
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
|
||||
: Aggregation.DEFAULT_CONTEXT;
|
||||
|
||||
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the already mapped update {@link Document}.
|
||||
*
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document getMappedUpdate(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (update != null) {
|
||||
return update instanceof MappedUpdate ? update.getUpdateObject()
|
||||
: updateMapper.getMappedObject(update.getUpdateObject(), entity);
|
||||
}
|
||||
return mappedDocument.getDocument();
|
||||
}
|
||||
|
||||
/**
|
||||
* Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not
|
||||
* already done in the actual {@link UpdateDefinition}.
|
||||
*
|
||||
* @param persistentEntity can be {@literal null}.
|
||||
*/
|
||||
void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity<?> persistentEntity) {
|
||||
|
||||
if (persistentEntity != null && persistentEntity.hasVersionProperty()) {
|
||||
|
||||
String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName();
|
||||
if (!update.modifies(versionFieldName)) {
|
||||
update.inc(versionFieldName);
|
||||
}
|
||||
}
|
||||
}
|
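The version bump above keeps optimistic locking intact for updates issued through the template rather than via save. A small sketch, assuming a hypothetical entity with a Spring Data @Version property and that this context backs the reactive update path:

import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Version;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.client.result.UpdateResult;

import reactor.core.publisher.Mono;

class VersionedUpdateSketch {

	static class Account {
		@Id String id;
		@Version Long version;
		long balance;
	}

	static Mono<UpdateResult> credit(ReactiveMongoOperations template) {

		// The Update below does not touch "version", so an implicit
		// $inc { version: 1 } is appended before the update is executed.
		return template.updateFirst(Query.query(Criteria.where("id").is("42")),
				new Update().inc("balance", 100), Account.class);
	}
}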
||||
|
||||
/**
|
||||
* @return {@literal true} if the update holds an aggregation pipeline.
|
||||
*/
|
||||
boolean isAggregationUpdate() {
|
||||
return update instanceof AggregationUpdate;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if all matching documents should be updated.
|
||||
*/
|
||||
boolean isMulti() {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> operations in a fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping a potentially given
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the
|
||||
* originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}.
|
||||
* However, it is possible to define a different {@literal returnType} via {@code as}.<br />
|
||||
* The collection to operate on is optional, in which case all collections within the actual database are watched. Use
|
||||
* {@literal watchCollection} to define a fixed collection.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* changeStream(Jedi.class)
|
||||
* .watchCollection("star-wars")
|
||||
* .filter(where("operationType").is("insert"))
|
||||
* .resumeAt(Instant.now())
|
||||
* .listen();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface ReactiveChangeStreamOperation {
|
||||
|
||||
/**
|
||||
* Start creating a change stream operation for the given {@literal domainType} watching all collections within the
|
||||
* database. <br />
|
||||
* Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or
|
||||
* {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements.
|
||||
* @return new instance of {@link ReactiveChangeStream}. Never {@literal null}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> ReactiveChangeStream<T> changeStream(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Compose change stream execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections or databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
}
|
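Because listen() never completes on its own, callers usually bound or cancel the stream themselves. A minimal sketch using plain Reactor operators; the Person type and collection name are assumptions:

import java.time.Duration;

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.Disposable;
import reactor.core.publisher.Flux;

class ChangeStreamLifecycleSketch {

	static void listenBriefly(ReactiveMongoTemplate template) {

		Flux<ChangeStreamEvent<Person>> events = template.changeStream(Person.class)
				.watchCollection("people")
				.listen();

		// Either cancel the subscription explicitly ...
		Disposable subscription = events.subscribe(event -> System.out.println(event.getBody()));
		subscription.dispose();

		// ... or let an operator complete the otherwise infinite stream.
		events.take(Duration.ofMinutes(5)).subscribe();
	}

	static class Person {}
}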
||||
|
||||
/**
|
||||
* Collection override (optional).
|
||||
*/
|
||||
interface ChangeStreamWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to watch.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code collection} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(String collection);
|
||||
|
||||
/**
|
||||
* Set the collection to watch. Collection name is derived from the {@link Class entityClass}.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code entityClass} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a filter for limiting results (optional).
|
||||
*/
|
||||
interface ChangeStreamWithFilterAndProjection<T> extends ResumingChangeStream<T>, TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to filter matching events.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(Aggregation by);
|
||||
|
||||
/**
|
||||
* Use a {@link CriteriaDefinition criteria} to filter matching events via an
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by);
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume a change stream (optional).
|
||||
*/
|
||||
interface ResumingChangeStream<T> extends TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Resume the change stream at a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}.
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(Instant)
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp)
|
||||
* @throws IllegalArgumentException if the given token is neither {@link Instant} nor {@link BsonTimestamp}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAt(Object token);
|
||||
|
||||
/**
|
||||
* Resume the change stream after a given point.
|
||||
*
|
||||
* @param token the resume token. Must be a {@link BsonValue}.
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue)
|
||||
* @see ChangeStreamOptionsBuilder#resumeToken(BsonValue)
|
||||
* @throws IllegalArgumentException if the given token is not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAfter(Object token);
|
||||
|
||||
/**
|
||||
* Start the change stream after a given point.
|
||||
*
|
||||
* @param token the resume token. Must be a {@link BsonValue}.
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#startAfter(BsonValue)
|
||||
* @throws IllegalArgumentException if the given token is not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> startAfter(Object token);
|
||||
}
|
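A hedged sketch of resuming from a previously captured token. Reading the raw resume token goes through the driver's ChangeStreamDocument; the collection name is an assumption:

import org.bson.BsonValue;
import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class ResumeAfterSketch {

	static Flux<ChangeStreamEvent<Document>> resume(ReactiveMongoTemplate template,
			ChangeStreamEvent<Document> lastSeen) {

		// The raw driver event carries the resume token of the last processed change.
		BsonValue token = lastSeen.getRaw().getResumeToken();

		return template.changeStream(Document.class)
				.watchCollection("orders")
				.resumeAfter(token) // must be a BsonValue, see resumeAfter(Object) above
				.listen();
	}
}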
||||
|
||||
/**
|
||||
* Provide some options.
|
||||
*/
|
||||
interface ChangeStreamWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously
|
||||
* defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the
|
||||
* builder and can be overwritten via e.g. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}.
|
||||
*
|
||||
* @param optionsConsumer never {@literal null}.
|
||||
* @return new instance of {@link ReactiveChangeStream}.
|
||||
*/
|
||||
ReactiveChangeStream<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer);
|
||||
}
|
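A short usage sketch of the options callback, e.g. asking the server to ship the full document for update events; the collection name is an assumption and FullDocument comes from the MongoDB driver:

import org.bson.Document;
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import com.mongodb.client.model.changestream.FullDocument;

import reactor.core.publisher.Flux;

class ChangeStreamOptionsSketch {

	static Flux<ChangeStreamEvent<Document>> updatesWithFullDocument(ReactiveMongoTemplate template) {

		return template.changeStream(Document.class)
				.watchCollection("inventory")
				// previously defined options (filter, resume token, ...) are carried into the builder
				.withOptions(options -> options.fullDocumentLookup(FullDocument.UPDATE_LOOKUP))
				.listen();
	}
}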
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way.
|
||||
*/
|
||||
interface ReactiveChangeStream<T> extends ChangeStreamWithOptions<T>, ChangeStreamWithCollection<T>,
|
||||
TerminatingChangeStream<T>, ResumingChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {}
|
||||
}
|
||||
@@ -0,0 +1,230 @@
|
||||
/*
|
||||
* Copyright 2019-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
/**
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> ReactiveChangeStream<T> changeStream(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null);
|
||||
}
|
||||
|
||||
static class ReactiveChangeStreamSupport<T>
|
||||
implements ReactiveChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final @Nullable String collection;
|
||||
private final @Nullable ChangeStreamOptions options;
|
||||
|
||||
private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
@Nullable String collection, @Nullable ChangeStreamOptions options) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass) {
|
||||
|
||||
Assert.notNull(entityClass, "Collection type must not be null!");
|
||||
|
||||
return watchCollection(template.getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAt(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAt(Object token) {
|
||||
|
||||
return withOptions(builder -> {
|
||||
|
||||
if (token instanceof Instant) {
|
||||
builder.resumeAt((Instant) token);
|
||||
} else if (token instanceof BsonTimestamp) {
|
||||
builder.resumeAt((BsonTimestamp) token);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.resumeAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#startAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> startAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.startAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithOptions#withOptions(java.util.function.Consumer)
|
||||
*/
|
||||
@Override
|
||||
public ReactiveChangeStreamSupport<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer) {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = initOptionsBuilder();
|
||||
optionsConsumer.accept(builder);
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.aggregation.Aggregation)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(Aggregation filter) {
|
||||
return withOptions(builder -> builder.filter(filter));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.query.CriteriaDefinition)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by) {
|
||||
|
||||
MatchOperation $match = Aggregation.match(by);
|
||||
Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match)
|
||||
: Aggregation.newAggregation($match);
|
||||
return filter(aggregation);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.TerminatingChangeStream#listen()
|
||||
*/
|
||||
@Override
|
||||
public Flux<ChangeStreamEvent<T>> listen() {
|
||||
return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType);
|
||||
}
|
||||
|
||||
private ChangeStreamOptionsBuilder initOptionsBuilder() {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder();
|
||||
if (options == null) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
options.getFilter().ifPresent(it -> {
|
||||
if (it instanceof Aggregation) {
|
||||
builder.filter((Aggregation) it);
|
||||
} else {
|
||||
builder.filter(((List<Document>) it).toArray(new Document[0]));
|
||||
}
|
||||
});
|
||||
options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup);
|
||||
options.getCollation().ifPresent(builder::collation);
|
||||
|
||||
if (options.isResumeAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::resumeAfter);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter);
|
||||
} else if (options.isStartAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::startAfter);
|
||||
} else {
|
||||
options.getResumeTimestamp().ifPresent(builder::resumeAt);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAt);
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
@@ -144,6 +145,18 @@ public interface ReactiveFindOperation {
|
||||
*/
|
||||
TerminatingFind<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingFind}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingFind<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
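A small usage sketch of the new default method; the entity and field names are assumptions:

import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

import reactor.core.publisher.Flux;

class FluentFindSketch {

	static Flux<Person> skywalkers(ReactiveMongoTemplate template) {

		// matching(CriteriaDefinition) wraps the criteria into Query.query(..) for us.
		return template.query(Person.class)
				.matching(where("lastname").is("skywalker"))
				.all();
	}

	static class Person {
		String firstname, lastname;
	}
}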
||||
|
||||
/**
|
||||
* Set the filter query for the geoNear execution.
|
||||
*
|
||||
|
||||
@@ -23,6 +23,7 @@ import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -31,8 +32,6 @@ import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.reactivestreams.client.FindPublisher;
|
||||
|
||||
/**
|
||||
* Implementation of {@link ReactiveFindOperation}.
|
||||
*
|
||||
@@ -120,12 +119,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
public Mono<T> first() {
|
||||
|
||||
FindPublisherPreparer preparer = getCursorPreparer(query);
|
||||
Flux<T> result = doFind(new FindPublisherPreparer() {
|
||||
@Override
|
||||
public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
|
||||
return preparer.prepare(publisher).limit(1);
|
||||
}
|
||||
});
|
||||
Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(1));
|
||||
|
||||
return result.next();
|
||||
}
|
||||
@@ -138,12 +132,7 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
public Mono<T> one() {
|
||||
|
||||
FindPublisherPreparer preparer = getCursorPreparer(query);
|
||||
Flux<T> result = doFind(new FindPublisherPreparer() {
|
||||
@Override
|
||||
public <D> FindPublisher<D> prepare(FindPublisher<D> publisher) {
|
||||
return preparer.prepare(publisher).limit(2);
|
||||
}
|
||||
});
|
||||
Flux<T> result = doFind(publisher -> preparer.prepare(publisher).limit(2));
|
||||
|
||||
return result.collectList().flatMap(it -> {
|
||||
|
||||
|
||||
@@ -23,4 +23,4 @@ package org.springframework.data.mongodb.core;
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation,
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {}
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {}
|
||||
|
||||
@@ -19,6 +19,7 @@ import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind;
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
|
||||
/**
|
||||
@@ -30,7 +31,7 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
* The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there
|
||||
* via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows overriding the
|
||||
* collection name for the execution.
|
||||
*
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* mapReduce(Human.class)
|
||||
@@ -146,6 +147,18 @@ public interface ReactiveMapReduceOperation {
|
||||
* @throws IllegalArgumentException if query is {@literal null}.
|
||||
*/
|
||||
TerminatingMapReduce<T> matching(Query query);
|
||||
|
||||
/**
|
||||
* Set the filter {@link CriteriaDefinition criteria} to be used.
|
||||
*
|
||||
* @param criteriaDefinition must not be {@literal null}.
|
||||
* @return new instance of {@link TerminatingMapReduce}.
|
||||
* @throws IllegalArgumentException if criteriaDefinition is {@literal null}.
|
||||
* @since 3.0
|
||||
*/
|
||||
default TerminatingMapReduce<T> matching(CriteriaDefinition criteriaDefinition) {
|
||||
return matching(Query.query(criteriaDefinition));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -22,7 +22,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
import com.mongodb.reactivestreams.client.MongoClient;
|
||||
import com.mongodb.reactivestreams.client.MongoClients;
|
||||
|
||||
|
||||
@@ -15,192 +15,17 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.async.client.MongoClientSettings;
|
||||
import com.mongodb.connection.ClusterSettings;
|
||||
import com.mongodb.connection.ConnectionPoolSettings;
|
||||
import com.mongodb.connection.ServerSettings;
|
||||
import com.mongodb.connection.SocketSettings;
|
||||
import com.mongodb.connection.SslSettings;
|
||||
import com.mongodb.connection.StreamFactoryFactory;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
* @deprecated since 3.0 - Use {@link MongoClientSettingsFactoryBean} instead.
|
||||
*/
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoClientSettings> {
|
||||
@Deprecated
|
||||
public class ReactiveMongoClientSettingsFactoryBean extends MongoClientSettingsFactoryBean {
|
||||
|
||||
private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
|
||||
|
||||
private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
|
||||
private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
|
||||
private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
|
||||
private List<MongoCredential> credentialList = new ArrayList<>();
|
||||
private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
|
||||
private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
|
||||
private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
|
||||
private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
|
||||
private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
|
||||
private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
|
||||
private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
|
||||
private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();
|
||||
|
||||
/**
|
||||
* Set the {@link ReadPreference}.
|
||||
*
|
||||
* @param readPreference
|
||||
*/
|
||||
public void setReadPreference(ReadPreference readPreference) {
|
||||
this.readPreference = readPreference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WriteConcern}.
|
||||
*
|
||||
* @param writeConcern
|
||||
*/
|
||||
public void setWriteConcern(WriteConcern writeConcern) {
|
||||
this.writeConcern = writeConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReadConcern}.
|
||||
*
|
||||
* @param readConcern
|
||||
*/
|
||||
public void setReadConcern(ReadConcern readConcern) {
|
||||
this.readConcern = readConcern;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the List of {@link MongoCredential}s.
|
||||
*
|
||||
* @param credentialList must not be {@literal null}.
|
||||
*/
|
||||
public void setCredentialList(List<MongoCredential> credentialList) {
|
||||
|
||||
Assert.notNull(credentialList, "CredentialList must not be null!");
|
||||
|
||||
this.credentialList.addAll(credentialList);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the {@link MongoCredential} to the list of credentials.
|
||||
*
|
||||
* @param mongoCredential must not be {@literal null}.
|
||||
*/
|
||||
public void addMongoCredential(MongoCredential mongoCredential) {
|
||||
|
||||
Assert.notNull(mongoCredential, "MongoCredential must not be null!");
|
||||
|
||||
this.credentialList.add(mongoCredential);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link StreamFactoryFactory}.
|
||||
*
|
||||
* @param streamFactoryFactory
|
||||
*/
|
||||
public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
|
||||
this.streamFactoryFactory = streamFactoryFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link CodecRegistry}.
|
||||
*
|
||||
* @param codecRegistry
|
||||
*/
|
||||
public void setCodecRegistry(CodecRegistry codecRegistry) {
|
||||
this.codecRegistry = codecRegistry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClusterSettings}.
|
||||
*
|
||||
* @param clusterSettings
|
||||
*/
|
||||
public void setClusterSettings(ClusterSettings clusterSettings) {
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SocketSettings}.
|
||||
*
|
||||
* @param socketSettings
|
||||
*/
|
||||
public void setSocketSettings(SocketSettings socketSettings) {
|
||||
this.socketSettings = socketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the heartbeat {@link SocketSettings}.
|
||||
*
|
||||
* @param heartbeatSocketSettings
|
||||
*/
|
||||
public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
|
||||
this.heartbeatSocketSettings = heartbeatSocketSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ConnectionPoolSettings}.
|
||||
*
|
||||
* @param connectionPoolSettings
|
||||
*/
|
||||
public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
|
||||
this.connectionPoolSettings = connectionPoolSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ServerSettings}.
|
||||
*
|
||||
* @param serverSettings
|
||||
*/
|
||||
public void setServerSettings(ServerSettings serverSettings) {
|
||||
this.serverSettings = serverSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SslSettings}.
|
||||
*
|
||||
* @param sslSettings
|
||||
*/
|
||||
public void setSslSettings(SslSettings sslSettings) {
|
||||
this.sslSettings = sslSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MongoClientSettings createInstance() throws Exception {
|
||||
|
||||
return MongoClientSettings.builder() //
|
||||
.readPreference(readPreference) //
|
||||
.writeConcern(writeConcern) //
|
||||
.readConcern(readConcern) //
|
||||
.credentialList(credentialList) //
|
||||
.streamFactoryFactory(streamFactoryFactory) //
|
||||
.codecRegistry(codecRegistry) //
|
||||
.clusterSettings(clusterSettings) //
|
||||
.socketSettings(socketSettings) //
|
||||
.heartbeatSocketSettings(heartbeatSocketSettings) //
|
||||
.connectionPoolSettings(connectionPoolSettings) //
|
||||
.serverSettings(serverSettings) //
|
||||
.sslSettings(sslSettings) //
|
||||
.build();
|
||||
}
|
||||
}
|
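Since the reactive-specific factory bean is now deprecated in favour of MongoClientSettingsFactoryBean and the unified com.mongodb.MongoClientSettings, plain builder-based configuration is an alternative. A hedged sketch against the driver's settings API, with host and port as assumptions:

import java.util.Collections;

import com.mongodb.MongoClientSettings;
import com.mongodb.ReadPreference;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;

class ClientSettingsMigrationSketch {

	static MongoClient reactiveClient() {

		MongoClientSettings settings = MongoClientSettings.builder()
				.readPreference(ReadPreference.secondaryPreferred())
				.writeConcern(WriteConcern.MAJORITY)
				// cluster/socket/pool settings moved into applyTo* blocks in the unified settings
				.applyToClusterSettings(cluster -> cluster.hosts(
						Collections.singletonList(new ServerAddress("localhost", 27017))))
				.build();

		return MongoClients.create(settings);
	}
}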
||||
|
||||
@@ -29,6 +29,7 @@ import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -39,7 +40,9 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
@@ -220,7 +223,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction();
|
||||
|
||||
/**
|
||||
@@ -235,7 +240,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction(Publisher<ClientSession> sessionProvider);
|
||||
|
||||
/**
|
||||
@@ -619,24 +626,52 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* entity mapping information to determine the collection the query is run against. Note that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns a {@link Flux} of {@link GeoResult}s for all entities matching the given {@link NearQuery}. Note that MongoDB
|
||||
* limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect
|
||||
* a particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2, this method uses aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -645,11 +680,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @return the converted object as it was before the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
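With the parameter widened to UpdateDefinition, both the classic Update and the aggregation-pipeline based AggregationUpdate are accepted here. A hedged sketch; the entity, field names and values are assumptions:

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import reactor.core.publisher.Mono;

class FindAndModifySketch {

	static Mono<Order> closeViaUpdate(ReactiveMongoTemplate template) {

		// Classic operator-based update.
		return template.findAndModify(Query.query(Criteria.where("state").is("OPEN")),
				new Update().set("state", "CLOSED"), Order.class);
	}

	static Mono<Order> closeViaPipeline(ReactiveMongoTemplate template) {

		// Aggregation pipeline style update, accepted through the UpdateDefinition signature.
		return template.findAndModify(Query.query(Criteria.where("state").is("OPEN")),
				AggregationUpdate.update().set("state").toValue("CLOSED"), Order.class);
	}

	static class Order {
		String id, state;
	}
}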
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -657,12 +695,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object as it was before the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, Update update, Class<T> entityClass, String collectionName);
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -671,13 +712,16 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification.
|
||||
* @param update the {@link Update} to apply on matching documents.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information.
|
||||
* @param entityClass the parametrized type.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
@@ -686,14 +730,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
* fields specification. Must not be {@literal null}.
|
||||
* @param update the {@link Update} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @param entityClass the parametrized type. Must not be {@literal null}.
|
||||
* @param collectionName the collection to query. Must not be {@literal null}.
|
||||
* @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()}
|
||||
* this will either be the object as it was before the update or as it is after the update.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
<T> Mono<T> findAndModify(Query query, Update update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass,
|
||||
String collectionName);
|
||||
|
||||
/**
|
||||
@@ -887,6 +934,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -898,7 +950,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -909,7 +965,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}.
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -927,8 +987,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -987,8 +1047,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1035,8 +1095,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1072,8 +1132,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1103,99 +1163,111 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass);
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
* domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific
|
||||
* support.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, Update update, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, String collectionName);
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
* combining the query document and the update document.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
* object. Must not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing object. Must not be {@literal null}.
|
||||
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> upsert(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
Mono<UpdateResult> upsert(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
* Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing. Must
|
||||
* not be {@literal null}.
|
||||
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
|
||||
* the existing. Must not be {@literal null}.
|
||||
* @param entityClass class that determines the collection to use.
|
||||
* @return the {@link UpdateResult} which lets you access the results of the previous write.
|
||||
* @since 3.0
|
||||
* @see Update
|
||||
* @see AggregationUpdate
|
||||
*/
|
||||
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass);
|
||||
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass);
|
||||
|
||||
/**
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided update document. <br />
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
* <br />
* domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific
* support. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
Mono<UpdateResult> updateFirst(Query query, Update update, String collectionName);
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, String collectionName);

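A sketch of the collection-name variant (same assumptions as above). Without a domain type there is no field mapping or version handling, so keys must match the raw document fields; "people" and "last_name" are illustrative values, not taken from the diff:

Query query = Query.query(Criteria.where("last_name").is("White"));

Mono<UpdateResult> result = template.updateFirst(query, Update.update("status", "wanted"), "people");
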
/**
* Updates the first object that is found in the specified collection that matches the query document criteria with
* the provided update document. <br />
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
Mono<UpdateResult> updateFirst(Query query, Update update, Class<?> entityClass, String collectionName);
Mono<UpdateResult> updateFirst(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);

/**
* Updates all objects that are found in the collection for the entity class that match the query document criteria
@@ -1203,27 +1275,34 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass);
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass);

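A sketch of updateMulti with an aggregation pipeline update, which is the main reason these signatures now accept UpdateDefinition rather than only Update (same assumptions as the earlier sketches; the field names are made up for the example):

// additional import on top of the upsert sketch
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;

// $set / $unset expressed as an aggregation pipeline update (requires MongoDB 4.2+)
AggregationUpdate update = AggregationUpdate.update()
        .set("status").toValue("wanted")
        .unset("pendingReview");

Mono<UpdateResult> result = template.updateMulti(
        Query.query(Criteria.where("lastName").is("White")), update, Person.class);
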
/**
* Updates all objects that are found in the specified collection that match the query document criteria with the
* provided update document. <br />
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
* domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support.
* domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific
* support.
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
Mono<UpdateResult> updateMulti(Query query, Update update, String collectionName);
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, String collectionName);

/**
* Updates all objects that are found in the collection for the entity class that match the query document criteria
@@ -1231,13 +1310,16 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
* {@literal null}.
* @param update the update document that contains the updated object or $ operators to manipulate the existing
* object. Must not be {@literal null}.
* @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate
* the existing object. Must not be {@literal null}.
* @param entityClass class of the pojo to be operated on. Must not be {@literal null}.
* @param collectionName name of the collection to update the object in. Must not be {@literal null}.
* @return the {@link UpdateResult} which lets you access the results of the previous write.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
Mono<UpdateResult> updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
Mono<UpdateResult> updateMulti(Query query, UpdateDefinition update, Class<?> entityClass, String collectionName);

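A sketch of the overload taking both the domain type and an explicit collection name, which keeps field mapping and version support while targeting a collection other than the mapped default (the "people_archive" name is illustrative; assumptions as above):

Mono<UpdateResult> result = template.updateMulti(
        Query.query(Criteria.where("status").is("retired")),
        Update.update("archived", true),
        Person.class,
        "people_archive");
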
/**
* Remove the given object from the collection by id.

File diff suppressed because it is too large

@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;

@@ -106,6 +107,18 @@ public interface ReactiveRemoveOperation {
* @throws IllegalArgumentException if query is {@literal null}.
*/
TerminatingRemove<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteriaDefinition must not be {@literal null}.
* @return new instance of {@link TerminatingRemove}.
* @throws IllegalArgumentException if query is {@literal null}.
* @since 3.0
*/
default TerminatingRemove<T> matching(CriteriaDefinition criteriaDefinition) {
return matching(Query.query(criteriaDefinition));
}
}

interface ReactiveRemove<T> extends RemoveWithCollection<T> {}

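A sketch of the new matching(CriteriaDefinition) shortcut in the fluent remove API, which saves wrapping the criteria in a Query by hand (same Person/template assumptions as the earlier sketches):

// additional import on top of the upsert sketch
import com.mongodb.client.result.DeleteResult;

Mono<DeleteResult> result = template.remove(Person.class)
        .inCollection("people")                        // optional; defaults to the mapped collection
        .matching(Criteria.where("lastName").is("White"))
        .all();
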
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.core;

import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;

import com.mongodb.client.result.UpdateResult;

@@ -118,13 +121,16 @@ public interface ReactiveUpdateOperation {
interface UpdateWithUpdate<T> {

/**
* Set the {@link org.springframework.data.mongodb.core.query.Update} to be applied.
* Set the {@link UpdateDefinition} to be applied.
*
* @param update must not be {@literal null}.
* @return new instance of {@link TerminatingUpdate}. Never {@literal null}.
* @throws IllegalArgumentException if update is {@literal null}.
* @since 3.0
* @see Update
* @see AggregationUpdate
*/
TerminatingUpdate<T> apply(org.springframework.data.mongodb.core.query.Update update);
TerminatingUpdate<T> apply(UpdateDefinition update);

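A sketch of apply(UpdateDefinition) in the fluent update API: both a classic Update and an AggregationUpdate now fit the same chain (assumptions as in the earlier sketches):

Mono<UpdateResult> viaUpdate = template.update(Person.class)
        .matching(Query.query(Criteria.where("lastName").is("White")))
        .apply(new Update().set("status", "wanted"))
        .first();

Mono<UpdateResult> viaPipeline = template.update(Person.class)
        .matching(Query.query(Criteria.where("status").is("wanted")))
        .apply(AggregationUpdate.update().set("notified").toValue(true))
        .all();
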
/**
* Specify {@code replacement} object.
@@ -166,6 +172,18 @@ public interface ReactiveUpdateOperation {
* @throws IllegalArgumentException if query is {@literal null}.
*/
UpdateWithUpdate<T> matching(Query query);

/**
* Set the filter {@link CriteriaDefinition criteria} to be used.
*
* @param criteriaDefinition must not be {@literal null}.
* @return new instance of {@link UpdateWithUpdate}.
* @throws IllegalArgumentException if query is {@literal null}.
* @since 3.0
*/
default UpdateWithUpdate<T> matching(CriteriaDefinition criteriaDefinition) {
return matching(Query.query(criteriaDefinition));
}
}

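And a sketch of the matching(CriteriaDefinition) shortcut added here, mirroring the remove API above (illustrative values only; assumptions as in the earlier sketches):

Mono<UpdateResult> result = template.update(Person.class)
        .matching(Criteria.where("firstName").is("Jesse"))
        .apply(new Update().set("employer", "Vamonos Pest"))
        .first();
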
/**
Some files were not shown because too many files have changed in this diff.