Compare commits
396 Commits
**.mvn/wrapper/maven-wrapper.properties** (4 changes, vendored)

```diff
@@ -1,2 +1,2 @@
-#Mon Oct 11 14:30:24 CEST 2021
-distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip
+#Mon Aug 14 08:53:22 EDT 2023
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
```
**CI.adoc** (2 changes)

```diff
@@ -16,7 +16,7 @@ All of these use cases are great reasons to essentially run what the CI server d
 
 IMPORTANT: To do this you must have Docker installed on your machine.
 
-1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk8-with-mongodb-4.0:latest /bin/bash`
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash`
 +
 This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
 +
```
**Jenkinsfile** (200 changes, vendored)

```diff
@@ -1,7 +1,7 @@
 def p = [:]
 node {
-  checkout scm
-  p = readProperties interpolate: true, file: 'ci/pipeline.properties'
+  checkout scm
+  p = readProperties interpolate: true, file: 'ci/pipeline.properties'
 }
 
 pipeline {
@@ -20,77 +20,77 @@ pipeline {
   stages {
     stage("Docker images") {
       parallel {
-        stage('Publish JDK (main) + MongoDB 4.0') {
+        stage('Publish JDK (Java 17) + MongoDB 4.4') {
           when {
-            anyOf {
-              changeset "ci/openjdk8-mongodb-4.0/**"
-              changeset "ci/pipeline.properties"
-            }
+            anyOf {
+              changeset "ci/openjdk17-mongodb-4.4/**"
+              changeset "ci/pipeline.properties"
+            }
           }
           agent { label 'data' }
           options { timeout(time: 30, unit: 'MINUTES') }
 
           steps {
             script {
-              def image = docker.build("springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.0.version']} ci/openjdk8-mongodb-4.0/")
-              docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+              def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk17-mongodb-4.4/")
+              docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
                 image.push()
               }
             }
           }
         }
-        stage('Publish JDK (main) + MongoDB 4.4') {
+        stage('Publish JDK (Java 17) + MongoDB 5.0') {
           when {
-            anyOf {
-              changeset "ci/openjdk8-mongodb-4.4/**"
-              changeset "ci/pipeline.properties"
-            }
+            anyOf {
+              changeset "ci/openjdk17-mongodb-5.0/**"
+              changeset "ci/pipeline.properties"
+            }
           }
           agent { label 'data' }
           options { timeout(time: 30, unit: 'MINUTES') }
 
           steps {
             script {
-              def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk8-mongodb-4.4/")
-              docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+              def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk17-mongodb-5.0/")
+              docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
                 image.push()
               }
             }
           }
         }
-        stage('Publish JDK (main) + MongoDB 5.0') {
+        stage('Publish JDK (Java 17) + MongoDB 6.0') {
           when {
-            anyOf {
-              changeset "ci/openjdk8-mongodb-5.0/**"
-              changeset "ci/pipeline.properties"
-            }
+            anyOf {
+              changeset "ci/openjdk17-mongodb-6.0/**"
+              changeset "ci/pipeline.properties"
+            }
           }
           agent { label 'data' }
           options { timeout(time: 30, unit: 'MINUTES') }
 
           steps {
             script {
-              def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk8-mongodb-5.0/")
-              docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-                image.push()
-              }
-            }
-          }
-        }
-        stage('Publish JDK (LTS) + MongoDB 4.4') {
+              def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/")
+              docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+                image.push()
+              }
+            }
+          }
+        }
+        stage('Publish JDK (Java 20) + MongoDB 6.0') {
           when {
-            anyOf {
-              changeset "ci/openjdk17-mongodb-4.4/**"
-              changeset "ci/pipeline.properties"
-            }
+            anyOf {
+              changeset "ci/openjdk20-mongodb-6.0/**"
+              changeset "ci/pipeline.properties"
+            }
           }
           agent { label 'data' }
           options { timeout(time: 30, unit: 'MINUTES') }
 
           steps {
             script {
-              def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}", "--build-arg BASE=${p['docker.java.lts.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk16-mongodb-4.4/")
-              docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
+              def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk20-mongodb-6.0/")
+              docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
                 image.push()
               }
             }
@@ -112,19 +112,17 @@ pipeline {
       }
       options { timeout(time: 30, unit: 'MINUTES') }
       environment {
-        ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+        ARTIFACTORY = credentials("${p['artifactory.credentials']}")
       }
       steps {
         script {
-          docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-            docker.image("springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
-              sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-              sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-              sh 'sleep 10'
-              sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-              sh 'sleep 15'
-              sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-            }
-          }
+          docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+            sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+            sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+            sh 'sleep 10'
+            sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+            sh 'sleep 15'
+            sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+          }
         }
       }
@@ -139,74 +137,68 @@ pipeline {
       }
     }
     parallel {
-      stage("test: mongodb 4.4 (main)") {
 
+      stage("test: MongoDB 5.0 (main)") {
        agent {
          label 'data'
        }
        options { timeout(time: 30, unit: 'MINUTES') }
        environment {
-         ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+         ARTIFACTORY = credentials("${p['artifactory.credentials']}")
        }
        steps {
          script {
-           docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-             docker.image("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
-               sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-               sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-               sh 'sleep 10'
-               sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-               sh 'sleep 15'
-               sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-             }
-           }
+           docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+             sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+             sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+             sh 'sleep 10'
+             sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+             sh 'sleep 15'
+             sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+           }
          }
        }
      }
-
-      stage("test: mongodb 5.0 (main)") {
-       agent {
-         label 'data'
-       }
-       options { timeout(time: 30, unit: 'MINUTES') }
-       environment {
-         ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
-       }
-       steps {
-         script {
-           docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-             docker.image("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
-               sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-               sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-               sh 'sleep 10'
-               sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-               sh 'sleep 15'
-               sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-             }
-           }
-         }
-       }
-     }
 
-      stage("test: baseline (LTS)") {
+      stage("test: MongoDB 6.0 (main)") {
        agent {
          label 'data'
        }
        options { timeout(time: 30, unit: 'MINUTES') }
        environment {
-         ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+         ARTIFACTORY = credentials("${p['artifactory.credentials']}")
        }
        steps {
         script {
-           docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-             docker.image("springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}").inside(p['docker.java.inside.basic']) {
-               alwaysPull true
-               sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-               sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-               sh 'sleep 10'
-               sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-               sh 'sleep 15'
-               sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-             }
-           }
+           docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+             sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+             sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+             sh 'sleep 10'
+             sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+             sh 'sleep 15'
+             sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+           }
          }
        }
      }
+
+      stage("test: MongoDB 6.0 (next)") {
+       agent {
+         label 'data'
+       }
+       options { timeout(time: 30, unit: 'MINUTES') }
+       environment {
+         ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+       }
+       steps {
+         script {
+           docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
+             sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+             sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+             sh 'sleep 10'
+             sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+             sh 'sleep 15'
+             sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+           }
+         }
+       }
@@ -228,23 +220,21 @@ pipeline {
       options { timeout(time: 20, unit: 'MINUTES') }
 
       environment {
-        ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
+        ARTIFACTORY = credentials("${p['artifactory.credentials']}")
       }
 
       steps {
        script {
-         docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
-           docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) {
-             sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -v'
-             sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
-               '-Dartifactory.server=https://repo.spring.io ' +
-               "-Dartifactory.username=${ARTIFACTORY_USR} " +
-               "-Dartifactory.password=${ARTIFACTORY_PSW} " +
-               "-Dartifactory.staging-repository=libs-snapshot-local " +
-               "-Dartifactory.build-name=spring-data-mongodb " +
-               "-Dartifactory.build-number=${BUILD_NUMBER} " +
-               '-Dmaven.test.skip=true clean deploy -U -B'
-           }
-         }
+         docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) {
+           sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -v'
+           sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
+             '-Dartifactory.server=https://repo.spring.io ' +
+             "-Dartifactory.username=${ARTIFACTORY_USR} " +
+             "-Dartifactory.password=${ARTIFACTORY_PSW} " +
+             "-Dartifactory.staging-repository=libs-snapshot-local " +
+             "-Dartifactory.build-name=spring-data-mongodb " +
+             "-Dartifactory.build-number=${BUILD_NUMBER} " +
+             '-Dmaven.test.skip=true clean deploy -U -B'
+         }
        }
      }
```
**README.adoc** (153 changes)

```diff
@@ -1,8 +1,8 @@
-image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
+image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start]
 
 = Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
 
-The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
+The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
 
-The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
+The Spring Data MongoDB project provides integration with the MongoDB document database.
@@ -93,142 +93,11 @@ and declare the appropriate dependency version.
 </repository>
 ----
 
-== Upgrading from 2.x
+[[upgrading]]
+== Upgrading
 
-The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions.
-Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.
-
-=== XML Namespace
-
-.Changed XML Namespace Elements and Attributes:
-|===
-| Element / Attribute | 2.x | 3.x
-
-| `<mongo:mongo-client />`
-| Used to create a `com.mongodb.MongoClient`
-| Now exposes a `com.mongodb.client.MongoClient`
-
-| `<mongo:mongo-client replica-set="..." />`
-| Was a comma delimited list of replica set members (host/port)
-| Now defines the replica set name. +
-Use `<mongo:client-settings cluster-hosts="..." />` instead
-
-| `<mongo:db-factory writeConcern="..." />`
-| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
-| W1, W2, W3, UNAKNOWLEDGED, AKNOWLEDGED, JOURNALED, MAJORITY
-|===
-
-.Removed XML Namespace Elements and Attributes:
-|===
-| Element / Attribute | Replacement in 3.x | Comment
-
-| `<mongo:db-factory mongo-ref="..." />`
-| `<mongo:db-factory mongo-client-ref="..." />`
-| Referencing a `com.mongodb.client.MongoClient`.
-
-| `<mongo:mongo-client credentials="..." />`
-| `<mongo:mongo-client credential="..." />`
-| Single authentication data instead of list.
-
-| `<mongo:client-options />`
-| `<mongo:client-settings />`
-| See `com.mongodb.MongoClientSettings` for details.
-|===
-
-.New XML Namespace Elements and Attributes:
-|===
-| Element | Comment
-
-| `<mongo:db-factory mongo-client-ref="..." />`
-| Replacement for `<mongo:db-factory mongo-ref="..." />`
-
-| `<mongo:db-factory connection-string="..." />`
-| Replacement for `uri` and `client-uri`.
-
-| `<mongo:mongo-client connection-string="..." />`
-| Replacement for `uri` and `client-uri`.
-
-| `<mongo:client-settings />`
-| Namespace element for `com.mongodb.MongoClientSettings`.
-
-|===
-
-=== Java Configuration
-
-.Java API changes
-|===
-| Type | Comment
-
-| `MongoClientFactoryBean`
-| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
-Uses `MongoClientSettings` instead of `MongoClientOptions`.
-
-| `MongoDataIntegrityViolationException`
-| Uses `WriteConcernResult` instead of `WriteResult`.
-
-| `BulkOperationException`
-| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`
-
-| `ReactiveMongoClientFactoryBean`
-| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
-
-| `ReactiveMongoClientSettingsFactoryBean`
-| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
-|===
-
-.Removed Java API:
-|===
-| 2.x | Replacement in 3.x | Comment
-
-| `MongoClientOptionsFactoryBean`
-| `MongoClientSettingsFactoryBean`
-| Creating a `com.mongodb.MongoClientSettings`.
-
-| `AbstractMongoConfiguration`
-| `AbstractMongoClientConfiguration` +
-(Available since 2.1)
-| Using `com.mongodb.client.MongoClient`.
-
-| `MongoDbFactory#getLegacyDb()`
-| -
-| -
-
-| `SimpleMongoDbFactory`
-| `SimpleMongoClientDbFactory` +
-(Available since 2.1)
-|
-
-| `MapReduceOptions#getOutputType()`
-| `MapReduceOptions#getMapReduceAction()`
-| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.
-
-| `Meta\|Query` maxScan & snapshot
-|
-|
-|===
-
-=== Other Changes
-
-==== UUID Types
-
-The MongoDB UUID representation can now be configured with different formats.
-This has to be done via `MongoClientSettings` as shown in the snippet below.
-
-.UUID Codec Configuration
-====
-[source,java]
-----
-static class Config extends AbstractMongoClientConfiguration {
-
-    @Override
-    public void configureClientSettings(MongoClientSettings.Builder builder) {
-        builder.uuidRepresentation(UuidRepresentation.STANDARD);
-    }
-
-    // ...
-}
-----
-====
+Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki].
+Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to.
 
 [[getting-help]]
 == Getting Help
@@ -277,12 +146,12 @@ and accessible from Maven using the Maven configuration noted <<maven-configurat
 NOTE: Configuration for Gradle is similar to Maven.
 
 The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
-Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
-to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
+to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
 to build a reactive one.
 
 However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
-and minimally, JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
+and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
 
 In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
 and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
@@ -313,7 +182,7 @@ To initialize the replica set, start a mongo client:
 [source,bash]
 ----
 $ $MONGODB_HOME/bin/mongo
-MongoDB server version: 5.0.0
+MongoDB server version: 6.0.0
 ...
 ----
@@ -341,7 +210,7 @@ Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw
 $ ./mvnw clean install
 ----
 
-If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].
+If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above].
 
 _Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
 the https://cla.pivotal.io/sign/spring[Contributor's Agreement] before your first non-trivial change._
```
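The README hunks above move the documented baseline to JDK 17, MongoDB 6.0, and a locally initiated single-node `rs0` replica set. As a hedged illustration only — this class is not part of the changeset, and the host, port, and replica-set name are simply the values the README uses — a quick connection check against that local replica set could look like this with the `com.mongodb.client` driver API that Spring Data MongoDB builds on:

```java
import com.mongodb.ConnectionString;
import com.mongodb.MongoClientSettings;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.bson.Document;

public class ReplicaSetSmokeTest {

    public static void main(String[] args) {
        // Assumes the single-node replica set "rs0" from the README instructions is running locally.
        ConnectionString connectionString = new ConnectionString("mongodb://127.0.0.1:27017/?replicaSet=rs0");

        MongoClientSettings settings = MongoClientSettings.builder()
                .applyConnectionString(connectionString)
                .build();

        try (MongoClient client = MongoClients.create(settings)) {
            // Run a simple "ping" command to verify the connection before building the project.
            Document result = client.getDatabase("admin").runCommand(new Document("ping", 1));
            System.out.println("Connected to rs0: " + result.toJson());
        }
    }
}
```

If this hangs or fails, the most likely cause is that `rs.initiate(...)` was never run against the local `mongod`.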
```diff
@@ -10,7 +10,7 @@ All of these use cases are great reasons to essentially run what Concourse does
 
 IMPORTANT: To do this you must have Docker installed on your machine.
 
-1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
+1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
 +
 This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
 +
@@ -23,7 +23,7 @@ Since the container is binding to your source, you can make edits from your IDE
 If you need to test the `build.sh` script, do this:
 
 1. `mkdir /tmp/spring-data-mongodb-artifactory`
-2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
+2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash`
 +
 This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
 artifactory output directory at `spring-data-mongodb-artifactory`.
@@ -36,4 +36,4 @@ IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about
 It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
 and deliver them to artifactory.
 
-NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
+NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
```
```diff
@@ -1,21 +0,0 @@
-ARG BASE
-FROM ${BASE}
-# Any ARG statements before FROM are cleared.
-ARG MONGODB
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
-  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
-  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
-  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
-  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
-  echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
-  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
-  apt-get clean; \
-  rm -rf /var/lib/apt/lists/*;
```
```diff
@@ -7,15 +7,16 @@ ENV TZ=Etc/UTC
 ENV DEBIAN_FRONTEND=noninteractive
 
 RUN set -eux; \
-  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
-  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
-  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
-  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
-  echo ${TZ} > /etc/timezone;
+  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+  sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
+  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 && \
+  echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list && \
+  echo ${TZ} > /etc/timezone
 
-RUN apt-get update ; \
-  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
-  apt-get clean; \
-  rm -rf /var/lib/apt/lists/*;
+RUN apt-get update && \
+  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+  apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
```
```diff
@@ -7,17 +7,18 @@ ENV TZ=Etc/UTC
 ENV DEBIAN_FRONTEND=noninteractive
 
 RUN set -eux; \
-  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
-  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
+  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+  sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
   # MongoDB 5.0 release signing key
-  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
+  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 && \
   # Needed when MongoDB creates a 5.0 folder.
-  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
-  echo ${TZ} > /etc/timezone;
+  echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list && \
+  echo ${TZ} > /etc/timezone
 
-RUN apt-get update; \
-  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
-  apt-get clean; \
-  rm -rf /var/lib/apt/lists/*;
+RUN apt-get update && \
+  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+  apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
```
**ci/openjdk17-mongodb-6.0/Dockerfile** (new file, 24 lines)

```diff
@@ -0,0 +1,24 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+  sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+  # MongoDB 6.0 release signing key
+  wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
+  # Needed when MongoDB creates a 6.0 folder.
+  echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
+  echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+  apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
```
**ci/openjdk20-mongodb-6.0/Dockerfile** (new file, 24 lines)

```diff
@@ -0,0 +1,24 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+  sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+  sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+  # MongoDB 6.0 release signing key
+  wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
+  # Needed when MongoDB creates a 6.0 folder.
+  echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
+  echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+  apt-get clean && \
+  rm -rf /var/lib/apt/lists/*
```
```diff
@@ -1,21 +0,0 @@
-ARG BASE
-FROM ${BASE}
-# Any ARG statements before FROM are cleared.
-ARG MONGODB
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN RUN set -eux; \
-  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
-  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
-  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
-  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
-  echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
-  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
-  apt-get clean; \
-  rm -rf /var/lib/apt/lists/*;
```
```diff
@@ -1,23 +0,0 @@
-ARG BASE
-FROM ${BASE}
-# Any ARG statements before FROM are cleared.
-ARG MONGODB
-
-ENV TZ=Etc/UTC
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN set -eux; \
-  sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
-  sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
-  apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
-  apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
-  echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
-  echo ${TZ} > /etc/timezone;
-
-RUN apt-get update ; \
-  ln -T /bin/true /usr/bin/systemctl ; \
-  apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
-  rm /usr/bin/systemctl ; \
-  apt-get clean ; \
-  rm -rf /var/lib/apt/lists/* ;
```
```diff
@@ -1,24 +1,27 @@
 # Java versions
-java.main.tag=8u312-b07-jdk
-java.next.tag=11.0.13_8-jdk
-java.lts.tag=17.0.1_12-jdk
+java.main.tag=17.0.8_7-jdk-focal
+java.next.tag=20-jdk-jammy
 
 # Docker container images - standard
-docker.java.main.image=eclipse-temurin:${java.main.tag}
-docker.java.next.image=eclipse-temurin:${java.next.tag}
-docker.java.lts.image=eclipse-temurin:${java.lts.tag}
+docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
+docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
 
 # Supported versions of MongoDB
-docker.mongodb.4.0.version=4.0.23
-docker.mongodb.4.4.version=4.4.4
-docker.mongodb.5.0.version=5.0.3
+docker.mongodb.4.4.version=4.4.23
+docker.mongodb.5.0.version=5.0.19
+docker.mongodb.6.0.version=6.0.8
 
 # Supported versions of Redis
-docker.redis.6.version=6.2.4
+docker.redis.6.version=6.2.13
 
 # Supported versions of Cassandra
-docker.cassandra.3.version=3.11.10
+docker.cassandra.3.version=3.11.15
 
 # Docker environment settings
 docker.java.inside.basic=-v $HOME:/tmp/jenkins-home
 docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home
+
+# Credentials
+docker.registry=
+docker.credentials=hub.docker.com-springbuildmaster
+artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c
```
**pom.xml** (50 changes)

```diff
@@ -5,17 +5,17 @@
 
 	<groupId>org.springframework.data</groupId>
 	<artifactId>spring-data-mongodb-parent</artifactId>
-	<version>3.4.0-SNAPSHOT</version>
+	<version>4.2.0-SNAPSHOT</version>
 	<packaging>pom</packaging>
 
 	<name>Spring Data MongoDB</name>
 	<description>MongoDB support for Spring Data</description>
-	<url>https://projects.spring.io/spring-data-mongodb</url>
+	<url>https://spring.io/projects/spring-data-mongodb</url>
 
 	<parent>
 		<groupId>org.springframework.data.build</groupId>
 		<artifactId>spring-data-parent</artifactId>
-		<version>2.7.0-SNAPSHOT</version>
+		<version>3.2.0-SNAPSHOT</version>
 	</parent>
 
 	<modules>
@@ -26,8 +26,8 @@
 	<properties>
 		<project.type>multi</project.type>
 		<dist.id>spring-data-mongodb</dist.id>
-		<springdata.commons>2.7.0-SNAPSHOT</springdata.commons>
-		<mongo>4.4.1</mongo>
+		<springdata.commons>3.2.0-SNAPSHOT</springdata.commons>
+		<mongo>4.10.2</mongo>
 		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 		<jmh.version>1.19</jmh.version>
 	</properties>
@@ -112,6 +112,17 @@
 		</developer>
 	</developers>
 
+	<scm>
+		<connection>scm:git:https://github.com/spring-projects/spring-data-mongodb.git</connection>
+		<developerConnection>scm:git:git@github.com:spring-projects/spring-data-mongodb.git</developerConnection>
+		<url>https://github.com/spring-projects/spring-data-mongodb</url>
+	</scm>
+
+	<issueManagement>
+		<system>GitHub</system>
+		<url>https://github.com/spring-projects/spring-data-mongodb/issues</url>
+	</issueManagement>
+
 	<profiles>
 		<profile>
 			<id>benchmarks</id>
@@ -134,30 +145,19 @@
 
 	<repositories>
 		<repository>
-			<id>spring-libs-snapshot</id>
-			<url>https://repo.spring.io/libs-snapshot</url>
-		</repository>
-		<repository>
-			<id>sonatype-libs-snapshot</id>
-			<url>https://oss.sonatype.org/content/repositories/snapshots</url>
-			<releases>
-				<enabled>false</enabled>
-			</releases>
+			<id>spring-snapshot</id>
+			<url>https://repo.spring.io/snapshot</url>
 			<snapshots>
 				<enabled>true</enabled>
 			</snapshots>
+			<releases>
+				<enabled>false</enabled>
+			</releases>
 		</repository>
 		<repository>
+			<id>spring-milestone</id>
+			<url>https://repo.spring.io/milestone</url>
 		</repository>
 	</repositories>
 
-	<pluginRepositories>
-		<pluginRepository>
-			<id>spring-plugins-release</id>
-			<url>https://repo.spring.io/plugins-release</url>
-		</pluginRepository>
-		<pluginRepository>
-			<id>spring-libs-milestone</id>
-			<url>https://repo.spring.io/libs-milestone</url>
-		</pluginRepository>
-	</pluginRepositories>
-
 </project>
```
```diff
@@ -7,7 +7,7 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.4.0-SNAPSHOT</version>
+		<version>4.2.0-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
```
```diff
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
```
```diff
@@ -322,7 +322,7 @@ public class AbstractMicrobenchmark {
 		try {
 			ResultsWriter.forUri(uri).write(results);
 		} catch (Exception e) {
-			System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e));
+			System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e));
 		}
 	}
 }
```
```diff
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
```
```diff
@@ -96,15 +96,14 @@ class MongoResultsWriter implements ResultsWriter {
 		for (Object key : doc.keySet()) {
 
 			Object value = doc.get(key);
-			if (value instanceof Document) {
-				value = fixDocumentKeys((Document) value);
-			} else if (value instanceof BasicDBObject) {
-				value = fixDocumentKeys(new Document((BasicDBObject) value));
+			if (value instanceof Document document) {
+				value = fixDocumentKeys(document);
+			} else if (value instanceof BasicDBObject basicDBObject) {
+				value = fixDocumentKeys(new Document(basicDBObject));
 			}
 
-			if (key instanceof String) {
+			if (key instanceof String newKey) {
 
-				String newKey = (String) key;
 				if (newKey.contains(".")) {
 					newKey = newKey.replace('.', ',');
 				}
```
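The `MongoResultsWriter` hunk above replaces explicit casts with Java 16+ pattern matching for `instanceof`. The following self-contained sketch (an illustrative helper, not code from the repository) shows the same idiom in isolation:

```java
import com.mongodb.BasicDBObject;
import org.bson.Document;

final class KeyFixer {

    // Illustrative only: mirrors the idiom adopted in MongoResultsWriter#fixDocumentKeys.
    static Object normalize(Object value) {
        if (value instanceof Document document) {
            // The pattern variable 'document' is already typed; no explicit (Document) cast is needed.
            return document;
        } else if (value instanceof BasicDBObject basicDBObject) {
            // The legacy driver type is rewrapped as an org.bson.Document (it implements Map<String, Object>).
            return new Document(basicDBObject);
        }
        return value;
    }
}
```

Because the pattern variable is only in scope where the test succeeds, the separate `String newKey = (String) key;` line in the old code becomes unnecessary, which is exactly what the hunk removes.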
```diff
@@ -1,5 +1,5 @@
 /*
- * Copyright 2017-2021 the original author or authors.
+ * Copyright 2017-2023 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
```
```diff
@@ -1,6 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
 
 	<modelVersion>4.0.0</modelVersion>
 
@@ -14,13 +15,18 @@
 	<parent>
 		<groupId>org.springframework.data</groupId>
 		<artifactId>spring-data-mongodb-parent</artifactId>
-		<version>3.4.0-SNAPSHOT</version>
+		<version>4.2.0-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
 
 	<properties>
 		<project.root>${basedir}/..</project.root>
 		<dist.key>SDMONGO</dist.key>
+
+		<!-- Observability -->
+		<micrometer-docs-generator.inputPath>${maven.multiModuleProjectDirectory}/spring-data-mongodb/</micrometer-docs-generator.inputPath>
+		<micrometer-docs-generator.inclusionPattern>.*</micrometer-docs-generator.inclusionPattern>
+		<micrometer-docs-generator.outputPath>${maven.multiModuleProjectDirectory}/target/</micrometer-docs-generator.outputPath>
 	</properties>
 
 	<build>
@@ -29,12 +35,43 @@
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-assembly-plugin</artifactId>
 			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>exec-maven-plugin</artifactId>
+				<executions>
+					<execution>
+						<id>generate-docs</id>
+						<phase>generate-resources</phase>
+						<goals>
+							<goal>java</goal>
+						</goals>
+						<configuration>
+							<mainClass>io.micrometer.docs.DocsGeneratorCommand</mainClass>
+							<includePluginDependencies>true</includePluginDependencies>
+							<arguments>
+								<argument>${micrometer-docs-generator.inputPath}</argument>
+								<argument>${micrometer-docs-generator.inclusionPattern}</argument>
+								<argument>${micrometer-docs-generator.outputPath}</argument>
+							</arguments>
+						</configuration>
+					</execution>
+				</executions>
+				<dependencies>
+					<dependency>
+						<groupId>io.micrometer</groupId>
+						<artifactId>micrometer-docs-generator</artifactId>
+						<version>1.0.1</version>
+						<type>jar</type>
+					</dependency>
+				</dependencies>
+			</plugin>
 			<plugin>
 				<groupId>org.asciidoctor</groupId>
 				<artifactId>asciidoctor-maven-plugin</artifactId>
 				<configuration>
 					<attributes>
-						<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
+						<mongo-reactivestreams>${mongo.reactivestreams}
+						</mongo-reactivestreams>
 						<reactor>${reactor}</reactor>
 					</attributes>
 				</configuration>
```
@@ -1,5 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -11,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-SNAPSHOT</version>
|
||||
<version>4.2.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -110,6 +112,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-crypt</artifactId>
|
||||
<version>1.8.0</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
@@ -122,27 +131,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava-reactive-streams</artifactId>
|
||||
<version>${rxjava-reactive-streams}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex.rxjava2</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava2}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex.rxjava3</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
@@ -152,12 +140,6 @@
|
||||
|
||||
<!-- CDI -->
|
||||
<!-- Dependency order required to build against CDI 1.0 and test with CDI 2.0 -->
|
||||
<dependency>
|
||||
<groupId>org.apache.geronimo.specs</groupId>
|
||||
<artifactId>geronimo-jcdi_2.0_spec</artifactId>
|
||||
<version>1.0.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.interceptor</groupId>
|
||||
@@ -167,31 +149,48 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.enterprise</groupId>
|
||||
<artifactId>cdi-api</artifactId>
|
||||
<groupId>jakarta.enterprise</groupId>
|
||||
<artifactId>jakarta.enterprise.cdi-api</artifactId>
|
||||
<version>${cdi}</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.annotation</groupId>
|
||||
<artifactId>javax.annotation-api</artifactId>
|
||||
<version>${javax-annotation-api}</version>
|
||||
<groupId>jakarta.annotation</groupId>
|
||||
<artifactId>jakarta.annotation-api</artifactId>
|
||||
<version>${jakarta-annotation-api}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-se</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-spi</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.openwebbeans</groupId>
|
||||
<artifactId>openwebbeans-impl</artifactId>
|
||||
<classifier>jakarta</classifier>
|
||||
<version>${webbeans}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- JSR 303 Validation -->
|
||||
<dependency>
|
||||
<groupId>javax.validation</groupId>
|
||||
<artifactId>validation-api</artifactId>
|
||||
<groupId>jakarta.validation</groupId>
|
||||
<artifactId>jakarta.validation-api</artifactId>
|
||||
<version>${validation}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
@@ -204,30 +203,37 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-observation</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.hibernate.validator</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>5.4.3.Final</version>
|
||||
<version>7.0.1.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>jakarta.el</groupId>
|
||||
<artifactId>jakarta.el-api</artifactId>
|
||||
<version>4.0.0</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.glassfish</groupId>
|
||||
<artifactId>javax.el</artifactId>
|
||||
<version>3.0.1-b11</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
<version>${jodatime}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.threeten</groupId>
|
||||
<artifactId>threetenbp</artifactId>
|
||||
<version>${threetenbp}</version>
|
||||
<artifactId>jakarta.el</artifactId>
|
||||
<version>4.0.2</version>
|
||||
<scope>provided</scope>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
@@ -272,9 +278,9 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>javax.transaction</groupId>
|
||||
<artifactId>jta</artifactId>
|
||||
<version>1.1</version>
|
||||
<groupId>jakarta.transaction</groupId>
|
||||
<artifactId>jakarta.transaction-api</artifactId>
|
||||
<version>2.0.0</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
@@ -305,11 +311,34 @@
|
||||
|
||||
<dependency>
|
||||
<groupId>io.mockk</groupId>
|
||||
<artifactId>mockk</artifactId>
|
||||
<artifactId>mockk-jvm</artifactId>
|
||||
<version>${mockk}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-test</artifactId>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.github.tomakehurst</groupId>
|
||||
<artifactId>wiremock-jre8-standalone</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.micrometer</groupId>
|
||||
<artifactId>micrometer-tracing-integration-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- jMolecules -->
|
||||
|
||||
<dependency>
|
||||
@@ -343,8 +372,11 @@
|
||||
<goal>test-process</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<outputDirectory>target/generated-test-sources</outputDirectory>
|
||||
<processor>org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor</processor>
|
||||
<outputDirectory>target/generated-test-sources
|
||||
</outputDirectory>
|
||||
<processor>
|
||||
org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor
|
||||
</processor>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
@@ -364,7 +396,9 @@
|
||||
<exclude>**/ReactivePerformanceTests.java</exclude>
|
||||
</excludes>
|
||||
<systemPropertyVariables>
|
||||
<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
|
||||
<java.util.logging.config.file>
|
||||
src/test/resources/logging.properties
|
||||
</java.util.logging.config.file>
|
||||
<reactor.trace.cancel>true</reactor.trace.cancel>
|
||||
</systemPropertyVariables>
|
||||
</configuration>
|
||||
|
||||
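The dependency section above replaces the javax.* era artifacts (cdi-api, javax.annotation-api, validation-api, jta) with their jakarta.* successors and raises hibernate-validator to 7.x. On the application side the same migration shows up as an import change; the entity below is a made-up example, not code from this repository.

// Hypothetical domain type illustrating the javax -> jakarta namespace move implied above.
// import javax.validation.constraints.NotNull;   // what the 3.x dependency set compiled against
import jakarta.validation.constraints.NotNull;    // what the 4.x dependency set above expects

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;

@Document("customers")
class Customer {

	@Id String id;

	@NotNull String name; // validated by hibernate-validator 7+, which implements jakarta.validation
}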
@@ -1,5 +1,5 @@
/*
* Copyright 2021 the original author or authors.
* Copyright 2021-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -103,19 +103,11 @@ public class BindableMongoExpression implements MongoExpression {
return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
}
/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.MongoExpression#toDocument()
*/
@Override
public Document toDocument() {
return target.get();
}
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
@@ -1,5 +1,5 @@
/*
* Copyright 2015-2021 the original author or authors.
* Copyright 2015-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
/*
* Copyright 2017-2021 the original author or authors.
* Copyright 2017-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -62,7 +62,7 @@ public interface CodecRegistryProvider {
*/
default <T> Optional<Codec<T>> getCodecFor(Class<T> type) {
Assert.notNull(type, "Type must not be null!");
Assert.notNull(type, "Type must not be null");
try {
return Optional.of(getCodecRegistry().get(type));
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2021 the original author or authors.
* Copyright 2010-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2013-2021 the original author or authors.
* Copyright 2013-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2021 the original author or authors.
* Copyright 2011-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2011-2021 the original author or authors.
* Copyright 2011-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -102,7 +102,7 @@ public class MongoDatabaseUtils {
|
||||
private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "Factory must not be null!");
|
||||
Assert.notNull(factory, "Factory must not be null");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER
|
||||
|| !TransactionSynchronizationManager.isSynchronizationActive()) {
|
||||
@@ -193,19 +193,11 @@ public class MongoDatabaseUtils {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) {
|
||||
|
||||
@@ -214,10 +206,6 @@ public class MongoDatabaseUtils {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public void afterCompletion(int status) {
|
||||
|
||||
@@ -228,10 +216,6 @@ public class MongoDatabaseUtils {
|
||||
super.afterCompletion(status);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
|
||||
@@ -1,57 +0,0 @@
/*
* Copyright 2011-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;
import org.springframework.dao.DataAccessException;
import com.mongodb.client.MongoDatabase;
/**
* Interface for factories creating {@link MongoDatabase} instances.
*
* @author Mark Pollack
* @author Thomas Darimont
* @author Christoph Strobl
* @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
*/
@Deprecated
public interface MongoDbFactory extends MongoDatabaseFactory {
/**
* Creates a default {@link MongoDatabase} instance.
*
* @return never {@literal null}.
* @throws DataAccessException
* @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
*/
@Deprecated
default MongoDatabase getDb() throws DataAccessException {
return getMongoDatabase();
}
/**
* Obtain a {@link MongoDatabase} instance to access the database with the given name.
*
* @param dbName must not be {@literal null} or empty.
* @return never {@literal null}.
* @throws DataAccessException
* @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
*/
@Deprecated
default MongoDatabase getDb(String dbName) throws DataAccessException {
return getMongoDatabase(dbName);
}
}
@@ -1,5 +1,5 @@
/*
* Copyright 2021 the original author or authors.
* Copyright 2021-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -0,0 +1,81 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb;
import java.util.Arrays;
import java.util.function.Consumer;
import org.springframework.data.domain.ManagedTypes;
/**
* @author Christoph Strobl
* @since 4.0
*/
public final class MongoManagedTypes implements ManagedTypes {
private final ManagedTypes delegate;
private MongoManagedTypes(ManagedTypes types) {
this.delegate = types;
}
/**
* Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}.
*
* @param managedTypes
* @return
*/
public static MongoManagedTypes from(ManagedTypes managedTypes) {
return new MongoManagedTypes(managedTypes);
}
/**
* Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}.
*
* @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}.
* @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}.
*/
public static MongoManagedTypes from(Class<?>... types) {
return fromIterable(Arrays.asList(types));
}
/**
* Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of
* {@link Class types}.
*
* @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be
* {@literal null}.
* @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class
* types}.
*/
public static MongoManagedTypes fromIterable(Iterable<? extends Class<?>> types) {
return from(ManagedTypes.fromIterable(types));
}
/**
* Factory method to return an empty {@link MongoManagedTypes} object.
*
* @return an empty {@link MongoManagedTypes} object.
*/
public static MongoManagedTypes empty() {
return from(ManagedTypes.empty());
}
@Override
public void forEach(Consumer<Class<?>> action) {
delegate.forEach(action);
}
}
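MongoManagedTypes, added above, is a MongoDB-specific wrapper around ManagedTypes so that the AOT machinery can tell which entity classes to contribute hints for. One way it could surface in user configuration is sketched below; the configuration class and the Person/Order entities are placeholders, not part of this change set.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoManagedTypes;

// Sketch only: exposes the domain types that ahead-of-time processing should consider.
@Configuration
class MongoManagedTypesConfiguration {

	@Bean
	MongoManagedTypes mongoManagedTypes() {
		return MongoManagedTypes.from(Person.class, Order.class);
	}
}

class Person {} // placeholder entity

class Order {} // placeholder entity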
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -68,7 +68,7 @@ class MongoResourceHolder extends ResourceHolderSupport {
ClientSession session = getSession();
if (session == null) {
throw new IllegalStateException("No session available!");
throw new IllegalStateException("No session available");
}
return session;
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -100,16 +100,12 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
*/
|
||||
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null");
|
||||
|
||||
this.dbFactory = dbFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction()
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction() throws TransactionException {
|
||||
|
||||
@@ -118,19 +114,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return new MongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException {
|
||||
|
||||
@@ -160,10 +148,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Object doSuspend(Object transaction) throws TransactionException {
|
||||
|
||||
@@ -173,19 +157,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doResume(@Nullable Object transaction, Object suspendedResources) {
|
||||
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected final void doCommit(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -236,10 +212,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doRollback(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -259,10 +231,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus)
|
||||
*/
|
||||
@Override
|
||||
protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException {
|
||||
|
||||
@@ -270,10 +238,6 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected void doCleanupAfterCompletion(Object transaction) {
|
||||
|
||||
@@ -302,7 +266,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
*/
|
||||
public void setDbFactory(MongoDatabaseFactory dbFactory) {
|
||||
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null!");
|
||||
Assert.notNull(dbFactory, "DbFactory must not be null");
|
||||
this.dbFactory = dbFactory;
|
||||
}
|
||||
|
||||
@@ -325,19 +289,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
return dbFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
|
||||
*/
|
||||
@Override
|
||||
public MongoDatabaseFactory getResourceFactory() {
|
||||
return getRequiredDbFactory();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDbFactory();
|
||||
@@ -359,7 +315,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
private MongoDatabaseFactory getRequiredDbFactory() {
|
||||
|
||||
Assert.state(dbFactory != null,
|
||||
"MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");
|
||||
"MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required");
|
||||
|
||||
return dbFactory;
|
||||
}
|
||||
@@ -397,7 +353,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("closed = %b, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
@@ -494,30 +450,22 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
private MongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. o_O");
|
||||
Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
TransactionSynchronizationUtils.triggerFlush();
|
||||
|
||||
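The MongoTransactionManager hunks above only tighten assertion messages and drop boilerplate (non-Javadoc) comments; how the class is wired stays the same. For orientation, a conventional setup looks roughly like the following sketch, with @Transactional service methods then running against a session-bound MongoDatabase.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;

// Standard configuration sketch (not taken from the diff): registering the transaction
// manager lets @Transactional methods execute their MongoDB calls inside a ClientSession.
@Configuration
class TransactionConfiguration {

	@Bean
	MongoTransactionManager transactionManager(MongoDatabaseFactory databaseFactory) {
		return new MongoTransactionManager(databaseFactory);
	}
}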
@@ -1,5 +1,5 @@
/*
* Copyright 2016-2021 the original author or authors.
* Copyright 2016-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -136,7 +136,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER) {
|
||||
return getMongoDatabaseOrDefault(dbName, factory);
|
||||
@@ -214,19 +214,11 @@ public class ReactiveMongoDatabaseUtils {
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
@@ -237,10 +229,6 @@ public class ReactiveMongoDatabaseUtils {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
@@ -256,10 +244,6 @@ public class ReactiveMongoDatabaseUtils {
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2019-2021 the original author or authors.
* Copyright 2019-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -104,16 +104,12 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
|
||||
@Nullable TransactionOptions options) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
|
||||
|
||||
this.databaseFactory = databaseFactory;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
@@ -123,19 +119,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
|
||||
TransactionDefinition definition) throws TransactionException {
|
||||
@@ -175,10 +163,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
@@ -192,10 +176,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
@@ -203,10 +183,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
@@ -243,10 +219,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return transactionObject.commitTransaction();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) {
|
||||
@@ -268,10 +240,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
@@ -282,10 +250,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
|
||||
Object transaction) {
|
||||
@@ -317,7 +281,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
*/
|
||||
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
|
||||
Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
@@ -340,10 +304,6 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
@Override
|
||||
public void afterPropertiesSet() {
|
||||
getRequiredDatabaseFactory();
|
||||
@@ -363,7 +323,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {
|
||||
|
||||
Assert.state(databaseFactory != null,
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");
|
||||
"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required");
|
||||
|
||||
return databaseFactory;
|
||||
}
|
||||
@@ -401,7 +361,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("closed = %b, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
@@ -498,30 +458,22 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
private ReactiveMongoResourceHolder getRequiredResourceHolder() {
|
||||
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
|
||||
Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O");
|
||||
return resourceHolder;
|
||||
}
|
||||
|
||||
private ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null.");
|
||||
Assert.state(session != null, "A Session is required but it turned out to be null");
|
||||
return session;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
|
||||
*/
|
||||
@Override
|
||||
public boolean isRollbackOnly() {
|
||||
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.support.SmartTransactionObject#flush()
|
||||
*/
|
||||
@Override
|
||||
public void flush() {
|
||||
throw new UnsupportedOperationException("flush() not supported");
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -76,13 +76,13 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
Class<D> databaseType, ClientSessionOperator<D> databaseDecorator, Class<C> collectionType,
ClientSessionOperator<C> collectionDecorator) {
Assert.notNull(session, "ClientSession must not be null!");
Assert.notNull(target, "Target must not be null!");
Assert.notNull(sessionType, "SessionType must not be null!");
Assert.notNull(databaseType, "Database type must not be null!");
Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!");
Assert.notNull(collectionType, "Collection type must not be null!");
Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!");
Assert.notNull(session, "ClientSession must not be null");
Assert.notNull(target, "Target must not be null");
Assert.notNull(sessionType, "SessionType must not be null");
Assert.notNull(databaseType, "Database type must not be null");
Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null");
Assert.notNull(collectionType, "Collection type must not be null");
Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null");
this.session = session;
this.target = target;

@@ -95,10 +95,6 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
this.sessionType = sessionType;
}
/*
* (non-Javadoc)
* @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation)
*/
@Nullable
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
@@ -1,5 +1,5 @@
/*
* Copyright 2018-2021 the original author or authors.
* Copyright 2018-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2020-2021 the original author or authors.
* Copyright 2020-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
/*
* Copyright 2010-2021 the original author or authors.
* Copyright 2010-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,12 +16,13 @@
package org.springframework.data.mongodb;
import org.springframework.dao.UncategorizedDataAccessException;
import org.springframework.lang.Nullable;
public class UncategorizedMongoDbException extends UncategorizedDataAccessException {
private static final long serialVersionUID = -2336595514062364929L;
public UncategorizedMongoDbException(String msg, Throwable cause) {
public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}
@@ -0,0 +1,107 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.aot;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.springframework.aot.generate.GenerationContext;
import org.springframework.aot.hint.MemberCategory;
import org.springframework.aot.hint.TypeReference;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.MergedAnnotations;
import org.springframework.data.annotation.Reference;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.DocumentReference;
/**
* @author Christoph Strobl
* @since 4.0
*/
public class LazyLoadingProxyAotProcessor {
private boolean generalLazyLoadingProxyContributed = false;
public void registerLazyLoadingProxyIfNeeded(Class<?> type, GenerationContext generationContext) {
Set<Field> refFields = getFieldsWithAnnotationPresent(type, Reference.class);
if (refFields.isEmpty()) {
return;
}
refFields.stream() //
.filter(LazyLoadingProxyAotProcessor::isLazyLoading) //
.forEach(field -> {
if (!generalLazyLoadingProxyContributed) {
generationContext.getRuntimeHints().proxies().registerJdkProxy(
TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class),
TypeReference.of(org.springframework.aop.SpringProxy.class),
TypeReference.of(org.springframework.aop.framework.Advised.class),
TypeReference.of(org.springframework.core.DecoratingProxy.class));
generalLazyLoadingProxyContributed = true;
}
if (field.getType().isInterface()) {
List<Class<?>> interfaces = new ArrayList<>(
Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
interfaces.add(org.springframework.aop.SpringProxy.class);
interfaces.add(org.springframework.aop.framework.Advised.class);
interfaces.add(org.springframework.core.DecoratingProxy.class);
generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new));
} else {
Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
LazyLoadingInterceptor::none);
// see: spring-projects/spring-framework/issues/29309
generationContext.getRuntimeHints().reflection().registerType(proxyClass,
MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, MemberCategory.DECLARED_FIELDS);
}
});
}
private static boolean isLazyLoading(Field field) {
if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) {
return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy();
}
if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) {
return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy();
}
return false;
}
private static Set<Field> getFieldsWithAnnotationPresent(Class<?> type, Class<? extends Annotation> annotation) {
Set<Field> fields = new LinkedHashSet<>();
for (Field field : type.getDeclaredFields()) {
if (MergedAnnotations.from(field).get(annotation).isPresent()) {
fields.add(field);
}
}
return fields;
}
}
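LazyLoadingProxyAotProcessor, added above, only contributes proxy and reflection hints for reference properties that are actually marked lazy. The kind of mapping that triggers it looks like the sketch below; Book, Author and Publisher are invented example types.

import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.DocumentReference;

// Invented example: the lazy = true properties are what registerLazyLoadingProxyIfNeeded
// inspects; interface-typed properties lead to JDK proxy hints, class types to a proxy
// class registered for reflection.
@Document("books")
class Book {

	@DBRef(lazy = true) //
	Author author;

	@DocumentReference(lazy = true) //
	Publisher publisher;
}

class Author {}

class Publisher {}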
@@ -0,0 +1,45 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.aot;
import java.util.function.Predicate;
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
import org.springframework.data.util.ReactiveWrappers;
import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary;
import org.springframework.data.util.TypeUtils;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
/**
* @author Christoph Strobl
* @since 4.0
*/
public class MongoAotPredicates {
public static final Predicate<Class<?>> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) || TypeUtils.type(type).isPartOf("org.bson");
public static final Predicate<ReactiveLibrary> IS_REACTIVE_LIBARARY_AVAILABLE = (lib) -> ReactiveWrappers.isAvailable(lib);
public static final Predicate<ClassLoader> IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils.isPresent("com.mongodb.client.MongoClient", classLoader);
public static boolean isReactorPresent() {
return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR);
}
public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) {
return IS_SYNC_CLIENT_PRESENT.test(classLoader);
}
}
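MongoAotPredicates, added above, bundles a few classpath and type checks that other AOT components reuse. A small usage sketch follows; the printed values depend on the classpath the snippet runs against, so they are examples rather than guarantees.

import org.bson.types.ObjectId;
import org.springframework.data.mongodb.aot.MongoAotPredicates;

// Usage sketch for the predicates introduced above.
class MongoAotPredicatesSample {

	public static void main(String[] args) {

		// ObjectId lives in org.bson and is a Mongo simple type, so no extra hints are needed for it.
		System.out.println(MongoAotPredicates.IS_SIMPLE_TYPE.test(ObjectId.class));

		// true only when the synchronous driver (com.mongodb.client.MongoClient) is present
		System.out.println(MongoAotPredicates.isSyncClientPresent(MongoAotPredicatesSample.class.getClassLoader()));

		// true only when Project Reactor is available
		System.out.println(MongoAotPredicates.isReactorPresent());
	}
}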
@@ -0,0 +1,56 @@
/*
* Copyright 2022-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.aot;
import org.springframework.aot.generate.GenerationContext;
import org.springframework.core.ResolvableType;
import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor;
import org.springframework.data.mongodb.MongoManagedTypes;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;
/**
* @author Christoph Strobl
* @since 2022/06
*/
class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor {
private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor();
public MongoManagedTypesBeanRegistrationAotProcessor() {
setModuleIdentifier("mongo");
}
@Override
protected boolean isMatch(@Nullable Class<?> beanType, @Nullable String beanName) {
return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName);
}
protected boolean isMongoManagedTypes(@Nullable Class<?> beanType) {
return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType);
}
@Override
protected void contributeType(ResolvableType type, GenerationContext generationContext) {
if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) {
return;
}
super.contributeType(type, generationContext);
lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext);
}
}
@@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2022-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.aot;
|
||||
|
||||
import static org.springframework.data.mongodb.aot.MongoAotPredicates.*;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.springframework.aot.hint.MemberCategory;
|
||||
import org.springframework.aot.hint.RuntimeHints;
|
||||
import org.springframework.aot.hint.RuntimeHintsRegistrar;
|
||||
import org.springframework.aot.hint.TypeReference;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link RuntimeHintsRegistrar} for repository types and entity callbacks.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 4.0
|
||||
*/
|
||||
class MongoRuntimeHints implements RuntimeHintsRegistrar {
|
||||
|
||||
@Override
|
||||
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
|
||||
|
||||
hints.reflection().registerTypes(
|
||||
Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class),
|
||||
TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)),
|
||||
builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
|
||||
MemberCategory.INVOKE_PUBLIC_METHODS));
|
||||
|
||||
registerTransactionProxyHints(hints, classLoader);
|
||||
|
||||
if (isReactorPresent()) {
|
||||
|
||||
hints.reflection()
|
||||
.registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class),
|
||||
TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class),
|
||||
TypeReference.of(ReactiveAfterSaveCallback.class)),
|
||||
builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
|
||||
MemberCategory.INVOKE_PUBLIC_METHODS));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
|
||||
|
||||
if (MongoAotPredicates.isSyncClientPresent(classLoader)
|
||||
&& ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) {
|
||||
|
||||
hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"),
|
||||
TypeReference.of("org.springframework.aop.SpringProxy"),
|
||||
TypeReference.of("org.springframework.core.DecoratingProxy"));
|
||||
hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"),
|
||||
TypeReference.of("org.springframework.aop.SpringProxy"),
|
||||
TypeReference.of("org.springframework.core.DecoratingProxy"));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
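The new `MongoRuntimeHints` registrar above contributes reflection hints for the entity-callback interfaces and JDK proxy hints for the transactional `MongoDatabase`/`MongoCollection` proxies. As a rough illustration of how such hints are supplied from application code, the following sketch registers one additional reflection hint for a hypothetical callback type; the class name is an assumption, not part of this change.

```java
import org.springframework.aot.hint.MemberCategory;
import org.springframework.aot.hint.RuntimeHints;
import org.springframework.aot.hint.RuntimeHintsRegistrar;
import org.springframework.aot.hint.TypeReference;
import org.springframework.lang.Nullable;

// Minimal sketch: an application-level registrar adding a hint for a hypothetical callback class.
class MyAppRuntimeHints implements RuntimeHintsRegistrar {

	@Override
	public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
		hints.reflection().registerType(TypeReference.of("com.example.MyBeforeSaveCallback"), // assumed class name
				builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
						MemberCategory.INVOKE_PUBLIC_METHODS));
	}
}
```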
@@ -1,5 +1,5 @@
/*
 * Copyright 2018-2021 the original author or authors.
 * Copyright 2018-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -25,9 +25,7 @@ import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.lang.Nullable;

import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;
@@ -80,30 +78,12 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
		return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
	}

	/**
	 * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
	 * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
	 * {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is
	 * overridden to implement alternate behavior.
	 *
	 * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
	 *         entities.
	 * @deprecated use {@link #getMappingBasePackages()} instead.
	 */
	@Deprecated
	@Nullable
	protected String getMappingBasePackage() {

		Package mappingBasePackage = getClass().getPackage();
		return mappingBasePackage == null ? null : mappingBasePackage.getName();
	}

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
	 * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
	 * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
	 * @see #mongoMappingContext(MongoCustomConversions)
	 * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
	 * @see #mongoDbFactory()
	 */
	@Bean
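The javadoc in the hunk above describes how the mapping base package is derived from the concrete configuration class. A minimal sketch, assuming a hypothetical `com.acme.AppConfig`, of how the deprecated single-package hook relates to the preferred `getMappingBasePackages()` override:

```java
package com.acme;

import java.util.Collection;
import java.util.List;

import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

// Hypothetical application configuration; entities below com.acme would be scanned by default.
public class AppConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "acme-db"; // illustrative database name
	}

	@Override
	protected Collection<String> getMappingBasePackages() {
		// Preferred over the deprecated getMappingBasePackage(): scan an explicit package.
		return List.of("com.acme.domain");
	}
}
```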
@@ -1,5 +1,5 @@
/*
 * Copyright 2016-2021 the original author or authors.
 * Copyright 2016-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -84,10 +84,10 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat

	/**
	 * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
	 * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
	 * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
	 *
	 * @see #customConversions()
	 * @see #mongoMappingContext(MongoCustomConversions)
	 * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
	 * @see #reactiveMongoDbFactory()
	 * @return never {@literal null}.
	 */

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2019-2021 the original author or authors.
 * Copyright 2019-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -30,10 +30,6 @@ import com.mongodb.ConnectionString;
 */
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {

	/*
	 * (non-Javadoc)
	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
	 */
	@Override
	public void setAsText(@Nullable String connectionString) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2013-2021 the original author or authors.
 * Copyright 2013-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2020-2021 the original author or authors.
 * Copyright 2020-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2015-2021 the original author or authors.
 * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2013-2021 the original author or authors.
 * Copyright 2013-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -34,10 +34,6 @@ import org.w3c.dom.Element;
 */
class GridFsTemplateParser extends AbstractBeanDefinitionParser {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
			throws BeanDefinitionStoreException {
@@ -46,10 +42,6 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
		return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -24,7 +24,6 @@ import java.util.List;
import java.util.Set;

import org.springframework.beans.BeanMetadataElement;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.RuntimeBeanReference;
@@ -64,6 +63,7 @@ import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
import org.springframework.util.xml.DomUtils;

import org.w3c.dom.Element;

/**
@@ -80,7 +80,7 @@ import org.w3c.dom.Element;
public class MappingMongoConverterParser implements BeanDefinitionParser {

	private static final String BASE_PACKAGE = "base-package";
	private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
	private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator",
			MappingMongoConverterParser.class.getClassLoader());

	/* (non-Javadoc)
@@ -135,9 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
					new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider"));
		}

		try {
			registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
		} catch (NoSuchBeanDefinitionException ignored) {
		if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) {

			BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
					.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
@@ -151,7 +149,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {

		BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);

		if (validatingMongoEventListener != null) {
		if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) {
			parserContext.registerBeanComponent(
					new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME));
		}
@@ -165,15 +163,16 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
	private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) {

		String disableValidation = element.getAttribute("disable-validation");
		boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation);
		boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation);

		if (!validationDisabled) {

			BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition();
			RuntimeBeanReference validator = getValidator(builder, parserContext);
			RuntimeBeanReference validator = getValidator(element, parserContext);

			if (validator != null) {
				builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class);
				builder.getRawBeanDefinition().setSource(element);
				builder.addConstructorArgValue(validator);

				return builder.getBeanDefinition();
@@ -195,7 +194,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
		validatorDef.setSource(source);
		validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
		String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef);
		parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName));

		return new RuntimeBeanReference(validatorName);
	}
@@ -255,7 +253,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
				&& Boolean.parseBoolean(abbreviateFieldNames);

		if (fieldNamingStrategyReferenced && abbreviationActivated) {
			context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!",
			context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured",
					element);
			return;
		}
@@ -376,10 +374,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
			this.delegates = new HashSet<>(Arrays.asList(filters));
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory)
		 */
		public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
				throws IOException {

@@ -1,5 +1,5 @@
/*
 * Copyright 2012-2021 the original author or authors.
 * Copyright 2012-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -47,28 +47,16 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
	private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
			MongoAuditingRegistrar.class.getClassLoader());

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
	 */
	@Override
	protected Class<?> getBeanClass(Element element) {
		return AuditingEntityCallback.class;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId()
	 */
	@Override
	protected boolean shouldGenerateId() {
		return true;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder)
	 */
	@Override
	protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2013-2021 the original author or authors.
 * Copyright 2013-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -18,11 +18,10 @@ package org.springframework.data.mongodb.config;
import java.lang.annotation.Annotation;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.core.Ordered;
import org.springframework.data.auditing.IsNewAwareAuditingHandler;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.auditing.config.AuditingConfiguration;
@@ -36,68 +35,42 @@ import org.springframework.util.Assert;
 * @author Thomas Darimont
 * @author Oliver Gierke
 * @author Mark Paluch
 * @author Christoph Strobl
 */
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
	 */
	@Override
	protected Class<? extends Annotation> getAnnotation() {
		return EnableMongoAuditing.class;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
	 */
	@Override
	protected String getAuditingHandlerBeanName() {
		return "mongoAuditingHandler";
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
	 */
	@Override
	public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
	protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
			BeanDefinitionRegistry registry) {

		Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");

		super.registerBeanDefinitions(annotationMetadata, registry);
		builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
	 */
	@Override
	protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {

		Assert.notNull(configuration, "AuditingConfiguration must not be null!");
		Assert.notNull(configuration, "AuditingConfiguration must not be null");

		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);

		BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
		definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);

		builder.addConstructorArgValue(definition.getBeanDefinition());
		return configureDefaultAuditHandlerAttributes(configuration, builder);
		return configureDefaultAuditHandlerAttributes(configuration,
				BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
	 */
	@Override
	protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
			BeanDefinitionRegistry registry) {

		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null");

		BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
				.rootBeanDefinition(AuditingEntityCallback.class);
@@ -108,4 +81,8 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
				AuditingEntityCallback.class.getName(), registry);
	}

	@Override
	public int getOrder() {
		return Ordered.LOWEST_PRECEDENCE;
	}
}
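The reworked `MongoAuditingRegistrar` above now implements `Ordered` and wires the `IsNewAwareAuditingHandler` via the `from` factory method against the `mongoMappingContext` bean. A minimal, hypothetical sketch of the annotation-driven configuration that triggers this registrar (the class and auditor names are illustrative, not part of this change):

```java
import java.util.Optional;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.AuditorAware;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

// Hypothetical configuration enabling MongoDB auditing; the registrar shown above wires the handler.
@Configuration
@EnableMongoAuditing(auditorAwareRef = "auditorProvider")
class AuditingConfig {

	@Bean
	AuditorAware<String> auditorProvider() {
		return () -> Optional.of("system"); // illustrative fixed auditor
	}
}
```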
@@ -1,5 +1,5 @@
/*
 * Copyright 2015-2021 the original author or authors.
 * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -35,10 +35,6 @@ import org.w3c.dom.Element;
 */
public class MongoClientParser implements BeanDefinitionParser {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
	 */
	public BeanDefinition parse(Element element, ParserContext parserContext) {

		Object source = parserContext.extractSource(element);

@@ -1,5 +1,5 @@
/*
 * Copyright 2016-2021 the original author or authors.
 * Copyright 2016-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -30,6 +30,7 @@ import org.springframework.data.convert.CustomConversions;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
import org.springframework.data.mongodb.MongoManagedTypes;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
import org.springframework.data.mongodb.core.mapping.Document;
@@ -76,14 +77,13 @@ public abstract class MongoConfigurationSupport {
	 *
	 * @see #getMappingBasePackages()
	 * @return
	 * @throws ClassNotFoundException
	 */
	@Bean
	public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
			throws ClassNotFoundException {
	public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions,
			MongoManagedTypes mongoManagedTypes) {

		MongoMappingContext mappingContext = new MongoMappingContext();
		mappingContext.setInitialEntitySet(getInitialEntitySet());
		mappingContext.setManagedTypes(mongoManagedTypes);
		mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
		mappingContext.setAutoIndexCreation(autoIndexCreation());
@@ -91,6 +91,16 @@ public abstract class MongoConfigurationSupport {
		return mappingContext;
	}

	/**
	 * @return new instance of {@link MongoManagedTypes}.
	 * @throws ClassNotFoundException
	 * @since 4.0
	 */
	@Bean
	public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException {
		return MongoManagedTypes.fromIterable(getInitialEntitySet());
	}

	/**
	 * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
	 * {@link CustomConversions} will be registered with the
@@ -196,7 +206,7 @@ public abstract class MongoConfigurationSupport {
	 * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
	 *
	 * @return {@literal false} by default. <br />
	 *         <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
	 *         <strong>INFO:</strong> As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
	 * @since 2.2
	 */
	protected boolean autoIndexCreation() {
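The hunks above move the set of mapped entities into a dedicated `MongoManagedTypes` bean that `mongoMappingContext` now consumes. A minimal sketch of how an application configuration could pin the managed types explicitly instead of relying on classpath scanning; it reuses the `fromIterable` factory shown above, and the `com.acme.Person` entity is an assumption:

```java
import java.util.List;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoManagedTypes;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

// Hypothetical override: restrict managed types to a known entity set.
@Configuration
class ManagedTypesConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "acme-db"; // illustrative database name
	}

	@Bean
	@Override
	public MongoManagedTypes mongoManagedTypes() {
		return MongoManagedTypes.fromIterable(List.of(com.acme.Person.class)); // assumed entity class
	}
}
```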
@@ -1,5 +1,5 @@
/*
 * Copyright 2015-2021 the original author or authors.
 * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -51,10 +51,6 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
	private static final String OPTIONS_DELIMITER = "?";
	private static final String OPTION_VALUE_DELIMITER = "&";

	/*
	 * (non-Javadoc)
	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
	 */
	@Override
	public void setAsText(@Nullable String text) throws IllegalArgumentException {

@@ -121,7 +117,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
							userNameAndPassword[1].toCharArray()));
				} else {
					throw new IllegalArgumentException(
							String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
							String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism));
				}
			}
		} else {
@@ -198,7 +194,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
			String[] optionArgs = option.split("=");

			if (optionArgs.length == 1) {
				throw new IllegalArgumentException(String.format("Query parameter '%s' has no value!", optionArgs[0]));
				throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0]));
			}

			properties.put(optionArgs[0], optionArgs[1]);
@@ -213,21 +209,21 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {

		if (source.length != 2) {
			throw new IllegalArgumentException(
					"Credentials need to specify username and password like in 'username:password@database'!");
					"Credentials need to specify username and password like in 'username:password@database'");
		}
	}

	private static void verifyDatabasePresent(String source) {

		if (!StringUtils.hasText(source)) {
			throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
			throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'");
		}
	}

	private static void verifyUserNamePresent(String[] source) {

		if (source.length == 0 || !StringUtils.hasText(source[0])) {
			throw new IllegalArgumentException("Credentials need to specify username!");
			throw new IllegalArgumentException("Credentials need to specify username");
		}
	}

@@ -235,7 +231,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
		try {
			return URLDecoder.decode(it, "UTF-8");
		} catch (UnsupportedEncodingException e) {
			throw new IllegalArgumentException("o_O UTF-8 not supported!", e);
			throw new IllegalArgumentException("o_O UTF-8 not supported", e);
		}
	}
}
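The property editor above parses credential strings of the form `username:password@database`, optionally followed by query parameters, and rejects malformed input with the (now exclamation-mark-free) messages shown. A minimal sketch of feeding such a string through the editor; the credential values are illustrative, and the concrete type behind `getValue()` is left untyped here because it is not spelled out in this hunk:

```java
import java.beans.PropertyEditor;

import org.springframework.data.mongodb.config.MongoCredentialPropertyEditor;

// Minimal sketch: converting a sample credential string via the property editor.
class CredentialEditorExample {

	public static void main(String[] args) {

		PropertyEditor editor = new MongoCredentialPropertyEditor();
		editor.setAsText("jon:warg@snow"); // username:password@database, illustrative values

		Object credentials = editor.getValue(); // holds the parsed com.mongodb.MongoCredential value(s)
		System.out.println(credentials);
	}
}
```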
@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -62,10 +62,6 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
		MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
			throws BeanDefinitionStoreException {
@@ -74,10 +70,6 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
		return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY_BEAN_NAME;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

@@ -171,7 +163,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {

		if (element.getAttributes().getLength() > allowedAttributesCount) {

			parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
			parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually",
					parserContext.extractSource(element));
		}

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -26,10 +26,6 @@ import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
 */
public class MongoNamespaceHandler extends NamespaceHandlerSupport {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.NamespaceHandler#init()
	 */
	public void init() {

		registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser());

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -40,7 +40,6 @@ import org.w3c.dom.Element;
 * @author Christoph Strobl
 * @author Mark Paluch
 */
@SuppressWarnings("deprecation")
abstract class MongoParsingUtils {

	private MongoParsingUtils() {}

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -39,10 +39,6 @@ import org.w3c.dom.Element;
 */
class MongoTemplateParser extends AbstractBeanDefinitionParser {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
			throws BeanDefinitionStoreException {
@@ -51,10 +47,6 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
		return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2020-2021 the original author or authors.
 * Copyright 2020-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -41,19 +41,11 @@ public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEnti
		this.converter = converter;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.FactoryBean#getObject()
	 */
	@Override
	public PersistentEntities getObject() {
		return PersistentEntities.of(converter.getMappingContext());
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.FactoryBean#getObjectType()
	 */
	@Override
	public Class<?> getObjectType() {
		return PersistentEntities.class;

@@ -1,5 +1,5 @@
/*
 * Copyright 2020-2021 the original author or authors.
 * Copyright 2020-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -18,11 +18,9 @@ package org.springframework.data.mongodb.config;
import java.lang.annotation.Annotation;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
import org.springframework.data.auditing.config.AuditingConfiguration;
@@ -34,56 +32,42 @@ import org.springframework.util.Assert;
 * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation.
 *
 * @author Mark Paluch
 * @author Christoph Strobl
 * @since 3.1
 */
class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
	 */
	@Override
	protected Class<? extends Annotation> getAnnotation() {
		return EnableReactiveMongoAuditing.class;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
	 */
	@Override
	protected String getAuditingHandlerBeanName() {
		return "reactiveMongoAuditingHandler";
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
	 */
	@Override
	protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
			BeanDefinitionRegistry registry) {
		builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
	}

	@Override
	protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {

		Assert.notNull(configuration, "AuditingConfiguration must not be null!");
		Assert.notNull(configuration, "AuditingConfiguration must not be null");

		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class);

		BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
		definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);

		builder.addConstructorArgValue(definition.getBeanDefinition());
		return configureDefaultAuditHandlerAttributes(configuration, builder);
		return configureDefaultAuditHandlerAttributes(configuration,
				BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class));
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
	 */
	@Override
	protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
			BeanDefinitionRegistry registry) {

		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
		Assert.notNull(registry, "BeanDefinitionRegistry must not be null");

		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);

@@ -1,5 +1,5 @@
/*
 * Copyright 2019-2021 the original author or authors.
 * Copyright 2019-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -32,10 +32,6 @@ import com.mongodb.ReadConcernLevel;
 */
public class ReadConcernPropertyEditor extends PropertyEditorSupport {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
	 */
	@Override
	public void setAsText(@Nullable String readConcernString) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2015-2021 the original author or authors.
 * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -29,10 +29,6 @@ import com.mongodb.ReadPreference;
 */
public class ReadPreferencePropertyEditor extends PropertyEditorSupport {

	/*
	 * (non-Javadoc)
	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
	 */
	@Override
	public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException {

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -43,13 +43,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
	 * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
	 */
	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
	private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'. Check your replica set configuration!";
	private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'; Check your replica set configuration";
	private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);

	/*
	 * (non-Javadoc)
	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
	 */
	@Override
	public void setAsText(@Nullable String replicaSetString) {

@@ -72,7 +68,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {

		if (serverAddresses.isEmpty()) {
			throw new IllegalArgumentException(
					"Could not resolve at least one server of the replica set configuration! Validate your config!");
					"Could not resolve at least one server of the replica set configuration; Validate your config");
		}

		setValue(serverAddresses.toArray(new ServerAddress[serverAddresses.size()]));
@@ -129,7 +125,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
	 */
	private String[] extractHostAddressAndPort(String addressAndPortSource) {

		Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
		Assert.notNull(addressAndPortSource, "Address and port source must not be null");

		String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
		String hostAddress = hostAndPort[0];
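The editor above splits each `host:port` entry with the regex `(?<!:):(?=[123456789]\d*$)` so that colons inside IPv6 literals are left alone, and stores the result as a `ServerAddress[]` (see the `setValue(...)` call in the hunk). A minimal sketch of parsing a comma-separated replica set string; the host names are illustrative:

```java
import java.beans.PropertyEditor;

import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;

import com.mongodb.ServerAddress;

// Minimal sketch: parsing a comma-separated replica set definition; hosts are illustrative.
class ServerAddressEditorExample {

	public static void main(String[] args) {

		PropertyEditor editor = new ServerAddressPropertyEditor();
		editor.setAsText("mongo1.example.com:27017,mongo2.example.com:27018");

		ServerAddress[] addresses = (ServerAddress[]) editor.getValue();
		System.out.println(addresses.length); // 2
	}
}
```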
@@ -1,5 +1,5 @@
/*
 * Copyright 2012-2021 the original author or authors.
 * Copyright 2012-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -26,10 +26,6 @@ import com.mongodb.WriteConcern;
 */
public class StringToWriteConcernConverter implements Converter<String, WriteConcern> {

	/*
	 * (non-Javadoc)
	 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
	 */
	public WriteConcern convert(String source) {

		WriteConcern writeConcern = WriteConcern.valueOf(source);

@@ -1,5 +1,5 @@
/*
 * Copyright 2020-2021 the original author or authors.
 * Copyright 2020-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -29,10 +29,6 @@ import org.springframework.util.StringUtils;
 */
public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {

	/*
	 * (non-Javadoc)
	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
	 */
	@Override
	public void setAsText(@Nullable String value) {

@@ -1,5 +1,5 @@
/*
 * Copyright 2011-2021 the original author or authors.
 * Copyright 2011-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
 * Copyright 2018-2021 the original author or authors.
 * Copyright 2018-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -15,8 +15,6 @@
 */
package org.springframework.data.mongodb.core;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
@@ -24,23 +22,16 @@ import java.util.stream.Collectors;
import org.bson.Document;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping;
import org.springframework.data.mongodb.core.aggregation.CountOperation;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
 * Utility methods to map {@link org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and
@@ -104,12 +95,7 @@ class AggregationUtil {
	 * @return
	 */
	List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {

		if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
			return aggregation.toPipeline(context);
		}

		return mapAggregationPipeline(aggregation.toPipeline(context));
		return aggregation.toPipeline(context);
	}

	/**
@@ -120,16 +106,7 @@ class AggregationUtil {
	 * @return
	 */
	Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) {

		Document command = aggregation.toDocument(collection, context);

		if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
			return command;
		}

		command.put("pipeline", mapAggregationPipeline(command.get("pipeline", List.class)));

		return command;
		return aggregation.toDocument(collection, context);
	}

	private List<Document> mapAggregationPipeline(List<Document> pipeline) {
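The hunks above collapse `createPipeline` and `createCommand` to plain delegation, leaving domain-type field mapping to the supplied `AggregationOperationContext`. A minimal sketch of rendering a pipeline against the default, pass-through context (the `$match` stage and field name are illustrative):

```java
import java.util.List;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

// Minimal sketch: rendering an aggregation pipeline with the default (pass-through) context.
class PipelineRenderingExample {

	public static void main(String[] args) {

		Aggregation aggregation = Aggregation.newAggregation(
				Aggregation.match(Criteria.where("lastname").is("Doe")));

		List<Document> pipeline = aggregation.toPipeline(Aggregation.DEFAULT_CONTEXT);
		System.out.println(pipeline); // one $match stage rendered to its Document form
	}
}
```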
@@ -1,5 +1,5 @@
/*
 * Copyright 2015-2021 the original author or authors.
 * Copyright 2015-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -19,16 +19,30 @@ import java.util.List;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.util.Pair;

import com.mongodb.bulk.BulkWriteResult;

/**
 * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB
 * 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add
 * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling
 * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and
 * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
 * operations or list of similar operations in sequence which can then eventually be executed by calling
 * {@link #execute()}.
 *
 * <pre class="code">
 * MongoOperations ops = …;
 *
 * ops.bulkOps(BulkMode.UNORDERED, Person.class)
 * 		.insert(newPerson)
 * 		.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
 * 		.execute();
 * </pre>
 * <p>
 * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
 * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
 * the version field remains not populated.
 *
 * @author Tobias Trelle
 * @author Oliver Gierke
 * @author Minsu Kim
@@ -71,7 +85,19 @@ public interface BulkOperations {
	 * @param update {@link Update} operation to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(Query query, Update update);
	default BulkOperations updateOne(Query query, Update update) {
		return updateOne(query, (UpdateDefinition) update);
	}

	/**
	 * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
	 *
	 * @param query update criteria, must not be {@literal null}.
	 * @param update {@link Update} operation to perform, must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 * @since 4.1
	 */
	BulkOperations updateOne(Query query, UpdateDefinition update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
@@ -79,7 +105,7 @@ public interface BulkOperations {
	 * @param updates Update operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateOne(List<Pair<Query, Update>> updates);
	BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates);

	/**
	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
@@ -88,7 +114,19 @@ public interface BulkOperations {
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(Query query, Update update);
	default BulkOperations updateMulti(Query query, Update update) {
		return updateMulti(query, (UpdateDefinition) update);
	}

	/**
	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 * @since 4.1
	 */
	BulkOperations updateMulti(Query query, UpdateDefinition update);

	/**
	 * Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
@@ -96,7 +134,7 @@ public interface BulkOperations {
	 * @param updates Update operations to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations updateMulti(List<Pair<Query, Update>> updates);
	BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates);

	/**
	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -106,7 +144,20 @@ public interface BulkOperations {
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 */
	BulkOperations upsert(Query query, Update update);
	default BulkOperations upsert(Query query, Update update) {
		return upsert(query, (UpdateDefinition) update);
	}

	/**
	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
	 * else an insert.
	 *
	 * @param query Update criteria.
	 * @param update Update operation to perform.
	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
	 * @since 4.1
	 */
	BulkOperations upsert(Query query, UpdateDefinition update);

	/**
	 * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -138,7 +189,7 @@ public interface BulkOperations {
	 *
	 * @param query Update criteria.
	 * @param replacement the replacement document. Must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
	 * @since 2.2
	 */
	default BulkOperations replaceOne(Query query, Object replacement) {
@@ -151,7 +202,7 @@ public interface BulkOperations {
	 * @param query Update criteria.
	 * @param replacement the replacement document. Must not be {@literal null}.
	 * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
	 * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
	 * @since 2.2
	 */
	BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
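The new `UpdateDefinition` overloads above (and the `default` bridges for plain `Update`) let bulk operations carry aggregation-pipeline updates as well as classic updates. A minimal sketch of mixing both in one bulk through the fluent API; the `Person` type and field names are illustrative assumptions:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

// Minimal sketch: one classic Update and one aggregation-pipeline UpdateDefinition in a single bulk.
class BulkUpdateExample {

	void runBulk(MongoOperations operations) {

		BulkOperations bulkOps = operations.bulkOps(BulkMode.UNORDERED, Person.class);

		// Classic Update goes through the default bridge method shown above.
		bulkOps.updateOne(new Query(where("firstname").is("Joe")), Update.update("lastname", "Doe"));

		// Aggregation-pipeline update uses the new UpdateDefinition overload directly.
		bulkOps.updateMulti(new Query(where("active").is(false)),
				AggregationUpdate.update().set("status").toValue("archived"));

		bulkOps.execute();
	}

	static class Person {} // placeholder for the assumed domain type
}
```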
@@ -0,0 +1,221 @@
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.context.ApplicationEvent;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.convert.UpdateMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
import org.springframework.data.mongodb.core.query.Collation;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.util.Assert;

import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.DeleteOneModel;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.ReplaceOneModel;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.WriteModel;

/**
 * Support class for bulk operations.
 *
 * @author Mark Paluch
 * @since 4.1
 */
abstract class BulkOperationsSupport {

	private final String collectionName;

	BulkOperationsSupport(String collectionName) {

		Assert.hasText(collectionName, "CollectionName must not be null nor empty");

		this.collectionName = collectionName;
	}

	/**
	 * Emit a {@link BeforeSaveEvent}.
	 *
	 * @param holder
	 */
	void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {

		if (holder.model() instanceof InsertOneModel) {

			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
		} else if (holder.model() instanceof ReplaceOneModel) {

			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
		}
	}

	/**
	 * Emit a {@link AfterSaveEvent}.
	 *
	 * @param holder
	 */
	void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {

		if (holder.model() instanceof InsertOneModel) {

			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
			maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
|
||||
} else if (holder.model() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
WriteModel<Document> mapWriteModel(Object source, WriteModel<Document> writeModel) {
|
||||
|
||||
if (writeModel instanceof UpdateOneModel<Document> model) {
|
||||
|
||||
if (source instanceof AggregationUpdate aggregationUpdate) {
|
||||
|
||||
List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
|
||||
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
|
||||
}
|
||||
|
||||
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof UpdateManyModel<Document> model) {
|
||||
|
||||
if (source instanceof AggregationUpdate aggregationUpdate) {
|
||||
|
||||
List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
|
||||
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
|
||||
}
|
||||
|
||||
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteOneModel<Document> model) {
|
||||
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteManyModel<Document> model) {
|
||||
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
return writeModel;
|
||||
}
|
||||
|
||||
private List<Document> mapUpdatePipeline(AggregationUpdate source) {
|
||||
|
||||
Class<?> type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class;
|
||||
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type,
|
||||
updateMapper().getMappingContext(), queryMapper());
|
||||
|
||||
return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context);
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit a {@link ApplicationEvent} if event multicasting is enabled.
|
||||
*
|
||||
* @param event
|
||||
*/
|
||||
protected abstract void maybeEmitEvent(ApplicationEvent event);
|
||||
|
||||
/**
|
||||
* @return the {@link UpdateMapper} to use.
|
||||
*/
|
||||
protected abstract UpdateMapper updateMapper();
|
||||
|
||||
/**
|
||||
* @return the {@link QueryMapper} to use.
|
||||
*/
|
||||
protected abstract QueryMapper queryMapper();
|
||||
|
||||
/**
|
||||
* @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}.
|
||||
*/
|
||||
protected abstract Optional<? extends MongoPersistentEntity<?>> entity();
|
||||
|
||||
protected Bson getMappedUpdate(Bson update) {
|
||||
return updateMapper().getMappedObject(update, entity());
|
||||
}
|
||||
|
||||
protected Bson getMappedQuery(Bson query) {
|
||||
return queryMapper().getMappedObject(query, entity());
|
||||
}
|
||||
|
||||
protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
|
||||
return switch (bulkMode) {
|
||||
case ORDERED -> options.ordered(true);
|
||||
case UNORDERED -> options.ordered(false);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||
* @param update The {@link Update} to apply
|
||||
* @param upsert flag to indicate if document should be upserted.
|
||||
* @return new instance of {@link UpdateOptions}.
|
||||
*/
|
||||
protected static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||
list.add(arrayFilter.asDocument());
|
||||
}
|
||||
options.arrayFilters(list);
|
||||
}
|
||||
|
||||
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
record SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||
}
|
||||
}
|
||||
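To illustrate the template-method design of `BulkOperationsSupport`, here is a minimal, hypothetical subclass wiring the abstract hooks. It is only a sketch and does not correspond to the actual `DefaultBulkOperations` shown further below; the types it uses are the ones imported in the new file above.

```java
class SimpleBulkOperationsSupport extends BulkOperationsSupport {

    private final QueryMapper queryMapper;   // assumed to be supplied by the caller
    private final UpdateMapper updateMapper;

    SimpleBulkOperationsSupport(String collectionName, QueryMapper queryMapper, UpdateMapper updateMapper) {

        super(collectionName);
        this.queryMapper = queryMapper;
        this.updateMapper = updateMapper;
    }

    @Override
    protected void maybeEmitEvent(ApplicationEvent event) {
        // no ApplicationEventPublisher in this sketch, so lifecycle events are simply dropped
    }

    @Override
    protected UpdateMapper updateMapper() {
        return updateMapper;
    }

    @Override
    protected QueryMapper queryMapper() {
        return queryMapper;
    }

    @Override
    protected Optional<? extends MongoPersistentEntity<?>> entity() {
        return Optional.empty(); // untyped usage; queries and updates are mapped without entity metadata
    }
}
```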
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -36,21 +36,29 @@ import com.mongodb.client.model.changestream.OperationType;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Myroslav Kosinskyi
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ChangeStreamEvent<T> {
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "converted");
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument");
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater
|
||||
.newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange");
|
||||
|
||||
private final @Nullable ChangeStreamDocument<Document> raw;
|
||||
|
||||
private final Class<T> targetType;
|
||||
private final MongoConverter converter;
|
||||
|
||||
// accessed through CONVERTED_UPDATER.
|
||||
private volatile @Nullable T converted;
|
||||
// accessed through CONVERTED_FULL_DOCUMENT_UPDATER.
|
||||
private volatile @Nullable T convertedFullDocument;
|
||||
|
||||
// accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER.
|
||||
private volatile @Nullable T convertedFullDocumentBeforeChange;
|
||||
|
||||
/**
|
||||
* @param raw can be {@literal null}.
|
||||
@@ -147,27 +155,43 @@ public class ChangeStreamEvent<T> {
|
||||
@Nullable
|
||||
public T getBody() {
|
||||
|
||||
if (raw == null) {
|
||||
if (raw == null || raw.getFullDocument() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Document fullDocument = raw.getFullDocument();
|
||||
return getConvertedFullDocument(raw.getFullDocument());
|
||||
}
|
||||
|
||||
if (fullDocument == null) {
|
||||
return targetType.cast(fullDocument);
|
||||
/**
|
||||
* Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed.
|
||||
*
|
||||
* @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is
|
||||
* {@literal null}.
|
||||
* @since 4.0
|
||||
*/
|
||||
@Nullable
|
||||
public T getBodyBeforeChange() {
|
||||
|
||||
if (raw == null || raw.getFullDocumentBeforeChange() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return getConverted(fullDocument);
|
||||
return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private T getConverted(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument);
|
||||
private T getConvertedFullDocumentBeforeChange(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER);
|
||||
}
|
||||
|
||||
private Object doGetConverted(Document fullDocument) {
|
||||
@SuppressWarnings("unchecked")
|
||||
private T getConvertedFullDocument(Document fullDocument) {
|
||||
return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER);
|
||||
}
|
||||
|
||||
Object result = CONVERTED_UPDATER.get(this);
|
||||
private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> updater) {
|
||||
|
||||
Object result = updater.get(this);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
@@ -176,30 +200,26 @@ public class ChangeStreamEvent<T> {
|
||||
if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) {
|
||||
|
||||
result = converter.read(targetType, fullDocument);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
return updater.compareAndSet(this, null, result) ? result : updater.get(this);
|
||||
}
|
||||
|
||||
if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) {
|
||||
|
||||
result = converter.getConversionService().convert(fullDocument, targetType);
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
return updater.compareAndSet(this, null, result) ? result : updater.get(this);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
public boolean equals(@Nullable Object o) {
|
||||
|
||||
if (this == o)
|
||||
return true;
|
||||
|
||||
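A hedged usage sketch of the new pre-image accessor via the reactive change stream API; `template` is assumed to be a `ReactiveMongoTemplate` and `Person` is a placeholder domain type:

```java
Flux<ChangeStreamEvent<Person>> events = template.changeStream("people",
        ChangeStreamOptions.builder().returnFullDocumentBeforeChange().build(), Person.class);

events.subscribe(event -> {

    Person after = event.getBody();               // converted fullDocument, may be null
    Person before = event.getBodyBeforeChange();  // converted fullDocumentBeforeChange, may be null
});
```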
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
* Copyright 2018-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -32,6 +32,7 @@ import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
|
||||
|
||||
/**
|
||||
* Options applicable to MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Streams</a>. Intended
|
||||
@@ -40,6 +41,7 @@ import com.mongodb.client.model.changestream.FullDocument;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Myroslav Kosinskyi
|
||||
* @since 2.1
|
||||
*/
|
||||
public class ChangeStreamOptions {
|
||||
@@ -47,6 +49,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
@@ -74,6 +77,14 @@ public class ChangeStreamOptions {
|
||||
return Optional.ofNullable(fullDocumentLookup);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 4.0
|
||||
*/
|
||||
public Optional<FullDocumentBeforeChange> getFullDocumentBeforeChangeLookup() {
|
||||
return Optional.ofNullable(fullDocumentBeforeChangeLookup);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
@@ -139,21 +150,21 @@ public class ChangeStreamOptions {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
if (timestamp instanceof Instant instant) {
|
||||
return new BsonTimestamp((int) instant.getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
if (timestamp instanceof BsonTimestamp bsonTimestamp) {
|
||||
return Instant.ofEpochSecond(bsonTimestamp.getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
"o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
@@ -170,6 +181,9 @@ public class ChangeStreamOptions {
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
@@ -184,6 +198,7 @@ public class ChangeStreamOptions {
|
||||
int result = ObjectUtils.nullSafeHashCode(filter);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resume);
|
||||
@@ -220,6 +235,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable Object filter;
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
@@ -234,7 +250,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder collation(Collation collation) {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null nor empty!");
|
||||
Assert.notNull(collation, "Collation must not be null nor empty");
|
||||
|
||||
this.collation = collation;
|
||||
return this;
|
||||
@@ -258,7 +274,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder filter(Aggregation filter) {
|
||||
|
||||
Assert.notNull(filter, "Filter must not be null!");
|
||||
Assert.notNull(filter, "Filter must not be null");
|
||||
|
||||
this.filter = filter;
|
||||
return this;
|
||||
@@ -287,7 +303,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) {
|
||||
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
@@ -316,12 +332,38 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) {
|
||||
|
||||
Assert.notNull(lookup, "Lookup must not be null!");
|
||||
Assert.notNull(lookup, "Lookup must not be null");
|
||||
|
||||
this.fullDocumentLookup = lookup;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link FullDocumentBeforeChange} lookup to use.
|
||||
*
|
||||
* @param lookup must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 4.0
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) {
|
||||
|
||||
Assert.notNull(lookup, "Lookup must not be null");
|
||||
|
||||
this.fullDocumentBeforeChangeLookup = lookup;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the full document before being changed if it is available.
|
||||
*
|
||||
* @return this.
|
||||
* @since 4.0
|
||||
* @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange)
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() {
|
||||
return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE);
|
||||
}
|
||||
|
||||
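The `returnFullDocumentBeforeChange()` shortcut above is equivalent to setting the lookup explicitly; a sketch combining it with the existing full-document lookup (values are illustrative):

```java
ChangeStreamOptions options = ChangeStreamOptions.builder()
        .fullDocumentLookup(FullDocument.UPDATE_LOOKUP)
        .fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE) // same as returnFullDocumentBeforeChange()
        .build();
```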
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
@@ -330,7 +372,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
@@ -345,7 +387,7 @@ public class ChangeStreamOptions {
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
@@ -391,6 +433,7 @@ public class ChangeStreamOptions {
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2021 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2021 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -26,6 +26,7 @@ import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.ValidationAction;
|
||||
import com.mongodb.client.model.ValidationLevel;
|
||||
@@ -46,24 +47,11 @@ public class CollectionOptions {
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
*
|
||||
* @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}.
|
||||
* @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}.
|
||||
* @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order),
|
||||
* false otherwise. Can be {@literal null}.
|
||||
* @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point.
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
}
|
||||
private @Nullable CollectionChangeStreamOptions changeStreamOptions;
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions,
|
||||
@Nullable CollectionChangeStreamOptions changeStreamOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
@@ -71,6 +59,7 @@ public class CollectionOptions {
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
this.changeStreamOptions = changeStreamOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -82,9 +71,9 @@ public class CollectionOptions {
|
||||
*/
|
||||
public static CollectionOptions just(Collation collation) {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
Assert.notNull(collation, "Collation must not be null");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -94,7 +83,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -111,15 +100,28 @@ public class CollectionOptions {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events.
|
||||
*
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #changeStream(CollectionChangeStreamOptions)
|
||||
* @see CollectionChangeStreamOptions#preAndPostImages(boolean)
|
||||
* @since 4.0
|
||||
*/
|
||||
public static CollectionOptions emitChangedRevisions() {
|
||||
return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true));
|
||||
}
|
||||
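Pre- and post-images are only emitted for collections created with that feature enabled; a hypothetical setup sketch where `template` and `Person` are placeholders:

```java
// shortcut
template.createCollection(Person.class, CollectionOptions.emitChangedRevisions());

// equivalent long form
template.createCollection(Person.class,
        CollectionOptions.empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true)));
```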
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}. <br />
|
||||
* <strong>NOTE</strong> Using capped collections requires defining {@link #size(long)}.
|
||||
* <strong>NOTE:</strong> Using capped collections requires defining {@link #size(long)}.
|
||||
*
|
||||
* @return new {@link CollectionOptions}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,7 +132,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -141,7 +144,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,7 +156,8 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -244,7 +249,7 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) {
|
||||
|
||||
Assert.notNull(validationLevel, "ValidationLevel must not be null!");
|
||||
Assert.notNull(validationLevel, "ValidationLevel must not be null");
|
||||
return validation(validationOptions.validationLevel(validationLevel));
|
||||
}
|
||||
|
||||
@@ -258,7 +263,7 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions schemaValidationAction(ValidationAction validationAction) {
|
||||
|
||||
Assert.notNull(validationAction, "ValidationAction must not be null!");
|
||||
Assert.notNull(validationAction, "ValidationAction must not be null");
|
||||
return validation(validationOptions.validationAction(validationAction));
|
||||
}
|
||||
|
||||
@@ -271,8 +276,9 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -284,8 +290,23 @@ public class CollectionOptions {
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link CollectionChangeStreamOptions}.
|
||||
*
|
||||
* @param changeStreamOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 4.0
|
||||
*/
|
||||
public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) {
|
||||
|
||||
Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
|
||||
changeStreamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -346,6 +367,70 @@ public class CollectionOptions {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link CollectionChangeStreamOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 4.0
|
||||
*/
|
||||
public Optional<CollectionChangeStreamOptions> getChangeStreamOptions() {
|
||||
return Optional.ofNullable(changeStreamOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped
|
||||
+ ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions="
|
||||
+ timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", disableValidation="
|
||||
+ disableValidation() + ", strictValidation=" + strictValidation() + ", moderateValidation="
|
||||
+ moderateValidation() + ", warnOnValidationError=" + warnOnValidationError() + ", failOnValidationError="
|
||||
+ failOnValidationError() + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
CollectionOptions that = (CollectionOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(size, that.size)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(capped, that.capped)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(maxDocuments);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(size);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(capped);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -440,6 +525,92 @@ public class CollectionOptions {
|
||||
boolean isEmpty() {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel
|
||||
+ ", validationAction=" + validationAction + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ValidationOptions that = (ValidationOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(validator, that.validator)) {
|
||||
return false;
|
||||
}
|
||||
if (validationLevel != that.validationLevel)
|
||||
return false;
|
||||
return validationAction == that.validationAction;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(validator);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of options applied to define a collection's change stream behaviour.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 4.0
|
||||
*/
|
||||
public static class CollectionChangeStreamOptions {
|
||||
|
||||
private final boolean preAndPostImages;
|
||||
|
||||
private CollectionChangeStreamOptions(boolean emitChangedRevisions) {
|
||||
this.preAndPostImages = emitChangedRevisions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Output the version of a document before and after changes (the document pre- and post-images).
|
||||
*
|
||||
* @return new instance of {@link CollectionChangeStreamOptions}.
|
||||
*/
|
||||
public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) {
|
||||
return new CollectionChangeStreamOptions(emitChangedRevisions);
|
||||
}
|
||||
|
||||
public boolean getPreAndPostImages() {
|
||||
return preAndPostImages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o;
|
||||
|
||||
return preAndPostImages == that.preAndPostImages;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return (preAndPostImages ? 1 : 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -460,7 +631,7 @@ public class CollectionOptions {
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
Assert.hasText(timeField, "Time field must not be empty or null");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
@@ -525,5 +696,40 @@ public class CollectionOptions {
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\''
|
||||
+ ", granularity=" + granularity + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(@Nullable Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
TimeSeriesOptions that = (TimeSeriesOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(granularity, that.granularity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(timeField);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(metaField);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(granularity);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
/*
 * Copyright 2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

import org.springframework.util.Assert;

import com.mongodb.client.MongoCollection;

/**
 * Interface for functional preparation of a {@link MongoCollection}.
 *
 * @author Mark Paluch
 * @since 4.1
 */
public interface CollectionPreparer<T> {

    /**
     * Returns a preparer that always returns its input collection.
     *
     * @return a preparer that always returns its input collection.
     */
    static <T> CollectionPreparer<T> identity() {
        return it -> it;
    }

    /**
     * Prepare the {@code collection}.
     *
     * @param collection the collection to prepare.
     * @return the prepared collection.
     */
    T prepare(T collection);

    /**
     * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies
     * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to
     * the caller of the composed function.
     *
     * @param after the collection preparer to apply after this function is applied.
     * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after}
     *         preparer.
     */
    default CollectionPreparer<T> andThen(CollectionPreparer<T> after) {
        Assert.notNull(after, "After CollectionPreparer must not be null");
        return c -> after.prepare(prepare(c));
    }

}
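A small sketch of composing preparers; the concrete read settings are illustrative only, and `collection` stands for an existing `MongoCollection<Document>`:

```java
CollectionPreparer<MongoCollection<Document>> concern = it -> it.withReadConcern(ReadConcern.MAJORITY);
CollectionPreparer<MongoCollection<Document>> preference = it -> it.withReadPreference(ReadPreference.secondaryPreferred());

// apply the read concern first, then the read preference
MongoCollection<Document> prepared = concern.andThen(preference).prepare(collection);
```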
@@ -0,0 +1,182 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.BiFunction;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon
|
||||
* {@link CollectionPreparer preparing a collection}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 4.1
|
||||
*/
|
||||
class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware {
|
||||
|
||||
private final List<Object> sources;
|
||||
|
||||
private CollectionPreparerSupport(List<Object> sources) {
|
||||
this.sources = sources;
|
||||
}
|
||||
|
||||
<T> T doPrepare(T collection, Function<T, ReadConcern> concernAccessor, BiFunction<T, ReadConcern, T> concernFunction,
|
||||
Function<T, ReadPreference> preferenceAccessor, BiFunction<T, ReadPreference, T> preferenceFunction) {
|
||||
|
||||
T collectionToUse = collection;
|
||||
|
||||
for (Object source : sources) {
|
||||
if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) {
|
||||
|
||||
ReadConcern concern = rca.getReadConcern();
|
||||
if (concernAccessor.apply(collectionToUse) != concern) {
|
||||
collectionToUse = concernFunction.apply(collectionToUse, concern);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (Object source : sources) {
|
||||
if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
|
||||
|
||||
ReadPreference preference = rpa.getReadPreference();
|
||||
if (preferenceAccessor.apply(collectionToUse) != preference) {
|
||||
collectionToUse = preferenceFunction.apply(collectionToUse, preference);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return collectionToUse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasReadConcern() {
|
||||
|
||||
for (Object aware : sources) {
|
||||
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReadConcern getReadConcern() {
|
||||
|
||||
for (Object aware : sources) {
|
||||
if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
|
||||
return rca.getReadConcern();
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasReadPreference() {
|
||||
|
||||
for (Object aware : sources) {
|
||||
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReadPreference getReadPreference() {
|
||||
|
||||
for (Object aware : sources) {
|
||||
if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
|
||||
return rpa.getReadPreference();
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
static class CollectionPreparerDelegate extends CollectionPreparerSupport
|
||||
implements CollectionPreparer<MongoCollection<Document>> {
|
||||
|
||||
private CollectionPreparerDelegate(List<Object> sources) {
|
||||
super(sources);
|
||||
}
|
||||
|
||||
public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) {
|
||||
return of((Object[]) awares);
|
||||
}
|
||||
|
||||
public static CollectionPreparerDelegate of(Object... mixedAwares) {
|
||||
|
||||
if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
|
||||
return (CollectionPreparerDelegate) mixedAwares[0];
|
||||
}
|
||||
|
||||
return new CollectionPreparerDelegate(Arrays.asList(mixedAwares));
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoCollection<Document> prepare(MongoCollection<Document> collection) {
|
||||
return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern,
|
||||
MongoCollection::getReadPreference, MongoCollection::withReadPreference);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport
|
||||
implements CollectionPreparer<com.mongodb.reactivestreams.client.MongoCollection<Document>> {
|
||||
|
||||
private ReactiveCollectionPreparerDelegate(List<Object> sources) {
|
||||
super(sources);
|
||||
}
|
||||
|
||||
public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) {
|
||||
return of((Object[]) awares);
|
||||
}
|
||||
|
||||
public static ReactiveCollectionPreparerDelegate of(Object... mixedAwares) {
|
||||
|
||||
if (mixedAwares.length == 1 && mixedAwares[0] instanceof ReactiveCollectionPreparerDelegate) {
|
||||
return (ReactiveCollectionPreparerDelegate) mixedAwares[0];
|
||||
}
|
||||
|
||||
return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares));
|
||||
}
|
||||
|
||||
@Override
|
||||
public com.mongodb.reactivestreams.client.MongoCollection<Document> prepare(
|
||||
com.mongodb.reactivestreams.client.MongoCollection<Document> collection) {
|
||||
return doPrepare(collection, //
|
||||
com.mongodb.reactivestreams.client.MongoCollection::getReadConcern,
|
||||
com.mongodb.reactivestreams.client.MongoCollection::withReadConcern,
|
||||
com.mongodb.reactivestreams.client.MongoCollection::getReadPreference,
|
||||
com.mongodb.reactivestreams.client.MongoCollection::withReadPreference);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
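A rough sketch of how the delegates are meant to be used internally; whether a given source (for example a `Query`) actually implements `ReadConcernAware` or `ReadPreferenceAware` is assumed here and not visible in this excerpt, and `database` is a `MongoDatabase` placeholder:

```java
// "source" stands for any object that may implement ReadConcernAware and/or ReadPreferenceAware;
// non-aware sources are simply ignored by the delegate.
CollectionPreparerDelegate preparer = CollectionPreparerDelegate.of(source);
MongoCollection<Document> prepared = preparer.prepare(database.getCollection("people"));
```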
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019-2021 the original author or authors.
|
||||
* Copyright 2019-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -23,8 +23,8 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.geo.Point;
|
||||
import org.springframework.data.mongodb.core.query.MetricConversion;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
@@ -38,7 +38,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*/
|
||||
class CountQuery {
|
||||
|
||||
private Document source;
|
||||
private final Document source;
|
||||
|
||||
private CountQuery(Document source) {
|
||||
this.source = source;
|
||||
@@ -64,18 +64,15 @@ class CountQuery {
|
||||
|
||||
for (Map.Entry<String, Object> entry : source.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
|
||||
if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Document theValue = (Document) entry.getValue();
|
||||
target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
|
||||
target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and")));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
|
||||
if (entry.getValue() instanceof Collection<?> collection && requiresRewrite(entry.getValue())) {
|
||||
|
||||
Collection<?> source = (Collection<?>) entry.getValue();
|
||||
|
||||
target.put(entry.getKey(), rewriteCollection(source));
|
||||
target.put(entry.getKey(), rewriteCollection(collection));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -96,12 +93,12 @@ class CountQuery {
|
||||
*/
|
||||
private boolean requiresRewrite(Object valueToInspect) {
|
||||
|
||||
if (valueToInspect instanceof Document) {
|
||||
return requiresRewrite((Document) valueToInspect);
|
||||
if (valueToInspect instanceof Document document) {
|
||||
return requiresRewrite(document);
|
||||
}
|
||||
|
||||
if (valueToInspect instanceof Collection) {
|
||||
return requiresRewrite((Collection) valueToInspect);
|
||||
if (valueToInspect instanceof Collection<?> collection) {
|
||||
return requiresRewrite(collection);
|
||||
}
|
||||
|
||||
return false;
|
||||
@@ -110,7 +107,7 @@ class CountQuery {
|
||||
private boolean requiresRewrite(Collection<?> collection) {
|
||||
|
||||
for (Object o : collection) {
|
||||
if (o instanceof Document && requiresRewrite((Document) o)) {
|
||||
if (o instanceof Document document && requiresRewrite(document)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -139,8 +136,8 @@ class CountQuery {
|
||||
Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
|
||||
|
||||
for (Object item : source) {
|
||||
if (item instanceof Document && requiresRewrite(item)) {
|
||||
rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
|
||||
if (item instanceof Document document && requiresRewrite(item)) {
|
||||
rewrittenCollection.add(CountQuery.of(document).toQueryDocument());
|
||||
} else {
|
||||
rewrittenCollection.add(item);
|
||||
}
|
||||
@@ -157,12 +154,14 @@ class CountQuery {
|
||||
* @param $and potentially existing {@code $and} condition.
|
||||
* @return the rewritten query {@link Document}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private static Document createGeoWithin(String key, Document source, @Nullable Object $and) {
|
||||
|
||||
boolean spheric = source.containsKey("$nearSphere");
|
||||
Object $near = spheric ? source.get("$nearSphere") : source.get("$near");
|
||||
|
||||
Number maxDistance = source.containsKey("$maxDistance") ? (Number) source.get("$maxDistance") : Double.MAX_VALUE;
|
||||
Number maxDistance = getMaxDistance(source, $near, spheric);
|
||||
|
||||
List<Object> $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance);
|
||||
Document $geoWithinMax = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMax));
|
||||
@@ -176,23 +175,51 @@ class CountQuery {
|
||||
Document $geoWithinMin = new Document("$geoWithin",
|
||||
new Document(spheric ? "$centerSphere" : "$center", $centerMin));
|
||||
|
||||
List<Document> criteria = new ArrayList<>();
|
||||
List<Document> criteria;
|
||||
|
||||
if ($and != null) {
|
||||
if ($and instanceof Collection) {
|
||||
criteria.addAll((Collection) $and);
|
||||
Collection<Document> andElements = (Collection<Document>) $and;
|
||||
criteria = new ArrayList<>(andElements.size() + 2);
|
||||
criteria.addAll(andElements);
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
|
||||
"Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: "
|
||||
+ $and);
|
||||
}
|
||||
} else {
|
||||
criteria = new ArrayList<>(2);
|
||||
}
|
||||
|
||||
criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin))));
|
||||
criteria.add(new Document(key, $geoWithinMax));
|
||||
|
||||
return new Document("$and", criteria);
|
||||
}
|
||||
|
||||
private static Number getMaxDistance(Document source, Object $near, boolean spheric) {
|
||||
|
||||
Number maxDistance = Double.MAX_VALUE;
|
||||
|
||||
if (source.containsKey("$maxDistance")) { // legacy coordinate pair
|
||||
return (Number) source.get("$maxDistance");
|
||||
}
|
||||
|
||||
if ($near instanceof Document nearDoc) {
|
||||
|
||||
if (nearDoc.containsKey("$maxDistance")) {
|
||||
|
||||
maxDistance = (Number) nearDoc.get("$maxDistance");
|
||||
// geojson is in Meters but we need radians x/(6378.1*1000)
|
||||
if (spheric && nearDoc.containsKey("$geometry")) {
|
||||
maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return maxDistance;
|
||||
}
|
||||
|
||||
private static boolean containsNear(Document source) {
|
||||
return source.containsKey("$near") || source.containsKey("$nearSphere");
|
||||
}
|
||||
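A tiny worked example of the meters-to-radians conversion referenced in the inline comment above (numbers rounded):

```java
// GeoJSON $maxDistance is given in meters; $centerSphere expects radians.
// Following the comment above: radians = meters / (6378.1 * 1000)
double maxDistanceMeters = 1000d;
double radians = maxDistanceMeters / (6378.1 * 1000); // ≈ 0.000157, roughly what MetricConversion.metersToRadians(1000) yields
```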
@@ -212,14 +239,20 @@ class CountQuery {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value instanceof Point) {
|
||||
return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
|
||||
if (value instanceof Point point) {
|
||||
return Arrays.asList(point.getX(), point.getY());
|
||||
}
|
||||
|
||||
if (value instanceof Document && ((Document) value).containsKey("x")) {
|
||||
if (value instanceof Document document) {
|
||||
|
||||
Document point = (Document) value;
|
||||
return Arrays.asList(point.get("x"), point.get("y"));
|
||||
if (document.containsKey("x")) {
|
||||
return Arrays.asList(document.get("x"), document.get("y"));
|
||||
}
|
||||
|
||||
if (document.containsKey("$geometry")) {
|
||||
Document geoJsonPoint = document.get("$geometry", Document.class);
|
||||
return geoJsonPoint.get("coordinates");
|
||||
}
|
||||
}
|
||||
|
||||
return value;
|
||||
|
||||
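Putting the pieces together, a hedged sketch of what the rewrite produces for a legacy `$near` query; the exact shape is reconstructed from the code above and the coordinates are arbitrary:

```java
Document nearQuery = new Document("location",
        new Document("$near", Arrays.asList(-73.99171, 40.738868)).append("$maxDistance", 0.01));

// roughly: { $and: [ { $nor: [ { location: { $geoWithin: { $center: [ [x, y], minDistance ] } } } ] },
//                    { location: { $geoWithin: { $center: [ [x, y], 0.01 ] } } } ] }
Document countable = CountQuery.of(nearQuery).toQueryDocument();
```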
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2002-2021 the original author or authors.
|
||||
* Copyright 2002-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -61,8 +61,8 @@ public interface CursorPreparer extends ReadPreferenceAware {
|
||||
default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
|
||||
Function<MongoCollection<Document>, FindIterable<Document>> find) {
|
||||
|
||||
Assert.notNull(collection, "Collection must not be null!");
|
||||
Assert.notNull(find, "Find function must not be null!");
|
||||
Assert.notNull(collection, "Collection must not be null");
|
||||
Assert.notNull(find, "Find function must not be null");
|
||||
|
||||
if (hasReadPreference()) {
|
||||
collection = collection.withReadPreference(getReadPreference());
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2010-2021 the original author or authors.
|
||||
* Copyright 2010-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2015-2021 the original author or authors.
|
||||
* Copyright 2015-2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -16,42 +16,47 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.callback.EntityCallback;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.*;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.ReplaceOneModel;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
@@ -67,7 +72,7 @@ import com.mongodb.client.model.*;
|
||||
* @author Jacob Botuck
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations {
|
||||
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
@@ -75,7 +80,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOptions bulkOptions;
|
||||
|
||||
/**
|
||||
@@ -90,14 +94,15 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DefaultBulkOperations(MongoOperations mongoOperations, String collectionName,
|
||||
BulkOperationContext bulkOperationContext) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.hasText(collectionName, "CollectionName must not be null nor empty!");
|
||||
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null!");
|
||||
super(collectionName);
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null");
|
||||
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
|
||||
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
this.bulkOperationContext = bulkOperationContext;
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -109,14 +114,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
this.defaultWriteConcern = defaultWriteConcern;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(Object document) {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
Assert.notNull(document, "Document must not be null");
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
@@ -125,93 +126,65 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations insert(List<? extends Object> documents) {
|
||||
|
||||
Assert.notNull(documents, "Documents must not be null!");
|
||||
Assert.notNull(documents, "Documents must not be null");
|
||||
|
||||
documents.forEach(this::insert);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateOne(Query query, Update update) {
|
||||
public BulkOperations updateOne(Query query, UpdateDefinition update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
return updateOne(Collections.singletonList(Pair.of(query, update)));
|
||||
return update(query, update, false, false);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
|
||||
public BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
Assert.notNull(updates, "Updates must not be null");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
for (Pair<Query, UpdateDefinition> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, false);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public BulkOperations updateMulti(Query query, Update update) {
|
||||
public BulkOperations updateMulti(Query query, UpdateDefinition update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
return updateMulti(Collections.singletonList(Pair.of(query, update)));
|
||||
update(query, update, false, true);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
|
||||
public BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates) {
|
||||
|
||||
Assert.notNull(updates, "Updates must not be null!");
|
||||
Assert.notNull(updates, "Updates must not be null");
|
||||
|
||||
for (Pair<Query, Update> update : updates) {
|
||||
for (Pair<Query, UpdateDefinition> update : updates) {
|
||||
update(update.getFirst(), update.getSecond(), false, true);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(Query query, Update update) {
|
||||
public BulkOperations upsert(Query query, UpdateDefinition update) {
|
||||
return update(query, update, true, true);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations upsert(List<Pair<Query, Update>> updates) {
|
||||
|
||||
@@ -222,14 +195,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
@@ -239,14 +208,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations remove(List<Query> removes) {
|
||||
|
||||
Assert.notNull(removes, "Removals must not be null!");
|
||||
Assert.notNull(removes, "Removals must not be null");
|
||||
|
||||
for (Query query : removes) {
|
||||
remove(query);
|
||||
@@ -255,16 +220,12 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(replacement, "Replacement must not be null");
|
||||
Assert.notNull(options, "Options must not be null");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
@@ -278,10 +239,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
*/
|
||||
@Override
|
||||
public com.mongodb.bulk.BulkWriteResult execute() {
|
||||
|
||||
@@ -289,14 +246,14 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
Assert.state(result != null, "Result must not be null");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -315,9 +272,8 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
bulkOptions);
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
if (ex instanceof MongoBulkWriteException) {
|
||||
if (ex instanceof MongoBulkWriteException mongoBulkWriteException) {
|
||||
|
||||
MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
|
||||
if (mongoBulkWriteException.getWriteConcernError() != null) {
|
||||
throw new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
@@ -332,17 +288,17 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
if (it.model() instanceof InsertOneModel<Document> model) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = model.getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.source(), target);
|
||||
} else if (it.model() instanceof ReplaceOneModel<Document> model) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
Document target = model.getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.source(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
return mapWriteModel(it.source(), it.model());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -354,10 +310,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
|
||||
private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(update, "Update must not be null!");
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||
|
||||
@@ -370,53 +326,30 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
private WriteModel<Document> mapWriteModel(WriteModel<Document> writeModel) {
|
||||
|
||||
if (writeModel instanceof UpdateOneModel) {
|
||||
|
||||
UpdateOneModel<Document> model = (UpdateOneModel<Document>) writeModel;
|
||||
|
||||
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof UpdateManyModel) {
|
||||
|
||||
UpdateManyModel<Document> model = (UpdateManyModel<Document>) writeModel;
|
||||
|
||||
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||
model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteOneModel) {
|
||||
|
||||
DeleteOneModel<Document> model = (DeleteOneModel<Document>) writeModel;
|
||||
|
||||
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
if (writeModel instanceof DeleteManyModel) {
|
||||
|
||||
DeleteManyModel<Document> model = (DeleteManyModel<Document>) writeModel;
|
||||
|
||||
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||
}
|
||||
|
||||
return writeModel;
|
||||
@Override
|
||||
protected void maybeEmitEvent(ApplicationEvent event) {
|
||||
bulkOperationContext.publishEvent(event);
|
||||
}
|
||||
|
||||
private Bson getMappedUpdate(Bson update) {
|
||||
return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity());
|
||||
@Override
|
||||
protected UpdateMapper updateMapper() {
|
||||
return bulkOperationContext.updateMapper();
|
||||
}
|
||||
|
||||
private Bson getMappedQuery(Bson query) {
|
||||
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||
@Override
|
||||
protected QueryMapper queryMapper() {
|
||||
return bulkOperationContext.queryMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Optional<? extends MongoPersistentEntity<?>> entity() {
|
||||
return bulkOperationContext.entity();
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
if (source instanceof Document document) {
|
||||
return document;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
@@ -429,268 +362,83 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.getModel() instanceof InsertOneModel) {
|
||||
if (holder.model() instanceof InsertOneModel<Document> model) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||
Document target = model.getDocument();
|
||||
maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
} else if (holder.model() instanceof ReplaceOneModel<Document> model) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||
Document target = model.getReplacement();
|
||||
maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (bulkOperationContext.getEventPublisher() == null) {
|
||||
return event;
|
||||
}
|
||||
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
return event;
|
||||
private void publishEvent(MongoMappingEvent<?> event) {
|
||||
bulkOperationContext.publishEvent(event);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
|
||||
switch (bulkMode) {
|
||||
case ORDERED:
|
||||
return options.ordered(true);
|
||||
case UNORDERED:
|
||||
return options.ordered(false);
|
||||
}
|
||||
|
||||
throw new IllegalStateException("BulkMode was null!");
|
||||
return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||
* @param update The {@link Update} to apply
|
||||
* @param upsert flag to indicate if document should be upserted.
|
||||
* @return new instance of {@link UpdateOptions}.
|
||||
*/
|
||||
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||
|
||||
UpdateOptions options = new UpdateOptions();
|
||||
options.upsert(upsert);
|
||||
|
||||
if (update.hasArrayFilters()) {
|
||||
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||
list.add(arrayFilter.asDocument());
|
||||
}
|
||||
options.arrayFilters(list);
|
||||
}
|
||||
|
||||
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link BulkOperationContext} holds information about
|
||||
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
|
||||
* {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to
|
||||
* {@link QueryMapper} and {@link UpdateMapper}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
static final class BulkOperationContext {
|
||||
record BulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
|
||||
@Nullable EntityCallbacks entityCallbacks) {
|
||||
|
||||
private final BulkMode bulkMode;
|
||||
private final Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final ApplicationEventPublisher eventPublisher;
|
||||
private final EntityCallbacks entityCallbacks;
|
||||
|
||||
BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
|
||||
EntityCallbacks entityCallbacks) {
|
||||
|
||||
this.bulkMode = bulkMode;
|
||||
this.entity = entity;
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.eventPublisher = eventPublisher;
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
public boolean skipEntityCallbacks() {
|
||||
return entityCallbacks == null;
|
||||
}
|
||||
|
||||
public BulkMode getBulkMode() {
|
||||
return this.bulkMode;
|
||||
public boolean skipEventPublishing() {
|
||||
return eventPublisher == null;
|
||||
}
|
||||
|
||||
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
|
||||
return this.entity;
|
||||
}
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
|
||||
|
||||
public QueryMapper getQueryMapper() {
|
||||
return this.queryMapper;
|
||||
}
|
||||
|
||||
public UpdateMapper getUpdateMapper() {
|
||||
return this.updateMapper;
|
||||
}
|
||||
|
||||
public ApplicationEventPublisher getEventPublisher() {
|
||||
return this.eventPublisher;
|
||||
}
|
||||
|
||||
public EntityCallbacks getEntityCallbacks() {
|
||||
return this.entityCallbacks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
BulkOperationContext that = (BulkOperationContext) o;
|
||||
|
||||
if (bulkMode != that.bulkMode)
|
||||
return false;
|
||||
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
|
||||
return false;
|
||||
if (skipEntityCallbacks()) {
|
||||
return entity;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, collectionName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = bulkMode != null ? bulkMode.hashCode() : 0;
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
|
||||
return result;
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
|
||||
String collectionName) {
|
||||
|
||||
if (skipEntityCallbacks()) {
|
||||
return entity;
|
||||
}
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, document, collectionName);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
|
||||
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
|
||||
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
|
||||
public void publishEvent(ApplicationEvent event) {
|
||||
|
||||
if (skipEventPublishing()) {
|
||||
return;
|
||||
}
|
||||
|
||||
eventPublisher.publishEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static final class SourceAwareWriteModelHolder {
|
||||
|
||||
private final Object source;
|
||||
private final WriteModel<Document> model;
|
||||
|
||||
SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||
|
||||
this.source = source;
|
||||
this.model = model;
|
||||
}
|
||||
|
||||
public Object getSource() {
|
||||
return this.source;
|
||||
}
|
||||
|
||||
public WriteModel<Document> getModel() {
|
||||
return this.model;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.model, that.model);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(model);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
|
||||
+ this.getModel() + ")";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
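Taken together, the DefaultBulkOperations changes above move shared mapping logic into a BulkOperationsSupport base class, widen the update methods from Update to UpdateDefinition, and turn BulkOperationContext into a record. A hedged usage sketch of the public API after this change; the collection and field names are illustrative, and the usual static imports of Query.query and Criteria.where are assumed:

// Sketch only; exercises the UpdateDefinition-based signatures shown in the diff.
BulkOperations bulkOps = mongoOperations.bulkOps(BulkMode.ORDERED, "people");

bulkOps.insert(new Document("name", "Walter"));
bulkOps.updateOne(query(where("name").is("Jesse")), new Update().set("active", false));
bulkOps.upsert(query(where("name").is("Mike")), new Update().inc("visits", 1));

BulkWriteResult result = bulkOps.execute();   // registered write models are flushed here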
@@ -1,5 +1,5 @@
/*
* Copyright 2011-2021 the original author or authors.
* Copyright 2011-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import java.util.List;
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
@@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -83,9 +85,9 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
|
||||
@Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
|
||||
Assert.notNull(collectionName, "Collection name can not be null!");
|
||||
Assert.notNull(queryMapper, "QueryMapper must not be null!");
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null");
|
||||
Assert.notNull(collectionName, "Collection name can not be null");
|
||||
Assert.notNull(queryMapper, "QueryMapper must not be null");
|
||||
|
||||
this.collectionName = collectionName;
|
||||
this.mapper = queryMapper;
|
||||
@@ -103,8 +105,8 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
*/
|
||||
public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class<?> type) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.mapper = new QueryMapper(mongoOperations.getConverter());
|
||||
@@ -112,10 +114,6 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
|
||||
*/
|
||||
public String ensureIndex(final IndexDefinition indexDefinition) {
|
||||
|
||||
return execute(collection -> {
|
||||
@@ -150,10 +148,6 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
return null;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public void dropIndex(final String name) {
|
||||
|
||||
execute(collection -> {
|
||||
@@ -163,18 +157,24 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes()
|
||||
*/
|
||||
@Override
|
||||
public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
|
||||
|
||||
Document indexOptions = new Document("name", name);
|
||||
indexOptions.putAll(options.toDocument());
|
||||
|
||||
Document result = mongoOperations
|
||||
.execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)));
|
||||
|
||||
if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
|
||||
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
|
||||
}
|
||||
}
|
||||
|
||||
public void dropAllIndexes() {
|
||||
dropIndex("*");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo()
|
||||
*/
|
||||
public List<IndexInfo> getIndexInfo() {
|
||||
|
||||
return execute(new CollectionCallback<List<IndexInfo>>() {
|
||||
@@ -188,7 +188,8 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
|
||||
|
||||
List<IndexInfo> indexInfoList = new ArrayList<>();
|
||||
int available = cursor.available();
|
||||
List<IndexInfo> indexInfoList = available > 0 ? new ArrayList<>(available) : new ArrayList<>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
|
||||
@@ -205,7 +206,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
@Nullable
|
||||
public <T> T execute(CollectionCallback<T> callback) {
|
||||
|
||||
Assert.notNull(callback, "CollectionCallback must not be null!");
|
||||
Assert.notNull(callback, "CollectionCallback must not be null");
|
||||
|
||||
if (type != null) {
|
||||
return mongoOperations.execute(type, callback);
|
||||
|
||||
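The new alterIndex method in the hunk above is a thin wrapper around MongoDB's collMod command; the equivalent raw call against the driver would look roughly like this (the database handle, collection name and option values are made up for illustration):

// Roughly what alterIndex issues; a non-ok reply is rethrown as UncategorizedMongoDbException.
Document indexOptions = new Document("name", "session_ttl_idx").append("expireAfterSeconds", 3600L);
Document reply = database.runCommand(new Document("collMod", "sessions").append("index", indexOptions));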
@@ -1,5 +1,5 @@
/*
* Copyright 2016-2021 the original author or authors.
* Copyright 2016-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,10 +42,6 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
this.mapper = mapper;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
*/
@Override
public IndexOperations indexOps(String collectionName, Class<?> type) {
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
@@ -0,0 +1,390 @@
|
||||
/*
|
||||
* Copyright 2023 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.context.ApplicationEvent;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.data.mapping.callback.EntityCallback;
|
||||
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.ReplaceOneModel;
|
||||
import com.mongodb.client.model.ReplaceOptions;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link ReactiveBulkOperations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 4.1
|
||||
*/
|
||||
class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations {
|
||||
|
||||
private final ReactiveMongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final ReactiveBulkOperationContext bulkOperationContext;
|
||||
private final List<Mono<SourceAwareWriteModelHolder>> models = new ArrayList<>();
|
||||
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOptions bulkOptions;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and
|
||||
* {@link ReactiveBulkOperationContext}.
|
||||
*
|
||||
* @param mongoOperations must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @param bulkOperationContext must not be {@literal null}.
|
||||
*/
|
||||
DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName,
|
||||
ReactiveBulkOperationContext bulkOperationContext) {
|
||||
|
||||
super(collectionName);
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null");
|
||||
Assert.hasText(collectionName, "CollectionName must not be null nor empty");
|
||||
Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
this.bulkOperationContext = bulkOperationContext;
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
* @param defaultWriteConcern can be {@literal null}.
|
||||
*/
|
||||
void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) {
|
||||
this.defaultWriteConcern = defaultWriteConcern;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations insert(Object document) {
|
||||
|
||||
Assert.notNull(document, "Document must not be null");
|
||||
|
||||
this.models.add(Mono.just(document).flatMap(it -> {
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
|
||||
return maybeInvokeBeforeConvertCallback(it);
|
||||
}).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it)))));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations insert(List<? extends Object> documents) {
|
||||
|
||||
Assert.notNull(documents, "Documents must not be null");
|
||||
|
||||
documents.forEach(this::insert);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
update(query, update, false, false);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
update(query, update, false, true);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) {
|
||||
return update(query, update, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations remove(Query query) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
this.models.add(Mono.just(query)
|
||||
.map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions))));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations remove(List<Query> removes) {
|
||||
|
||||
Assert.notNull(removes, "Removals must not be null");
|
||||
|
||||
for (Query query : removes) {
|
||||
remove(query);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(replacement, "Replacement must not be null");
|
||||
Assert.notNull(options, "Options must not be null");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
this.models.add(Mono.just(replacement).flatMap(it -> {
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
|
||||
return maybeInvokeBeforeConvertCallback(it);
|
||||
}).map(it -> new SourceAwareWriteModelHolder(it,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(it), replaceOptions))));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<BulkWriteResult> execute() {
|
||||
|
||||
try {
|
||||
return mongoOperations.execute(collectionName, this::bulkWriteTo).next();
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private Mono<BulkWriteResult> bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
Flux<SourceAwareWriteModelHolder> concat = Flux.concat(models).flatMap(it -> {
|
||||
|
||||
if (it.model()instanceof InsertOneModel<Document> iom) {
|
||||
|
||||
Document target = iom.getDocument();
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
return maybeInvokeBeforeSaveCallback(it.source(), target)
|
||||
.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom)));
|
||||
} else if (it.model()instanceof ReplaceOneModel<Document> rom) {
|
||||
|
||||
Document target = rom.getReplacement();
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
return maybeInvokeBeforeSaveCallback(it.source(), target)
|
||||
.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom)));
|
||||
}
|
||||
|
||||
return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model())));
|
||||
});
|
||||
|
||||
MongoCollection<Document> theCollection = collection;
|
||||
return concat.collectList().flatMap(it -> {
|
||||
|
||||
return Mono
|
||||
.from(theCollection
|
||||
.bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions))
|
||||
.doOnSuccess(state -> {
|
||||
it.forEach(this::maybeEmitAfterSaveEvent);
|
||||
}).flatMap(state -> {
|
||||
List<Mono<Object>> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList());
|
||||
|
||||
return Flux.concat(monos).then(Mono.just(state));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
* @param query the {@link Query} to determine documents to update.
|
||||
* @param update the {@link Update} to perform, must not be {@literal null}.
|
||||
* @param upsert whether to upsert.
|
||||
* @param multi whether to issue a multi-update.
|
||||
* @return the {@link BulkOperations} with the update registered.
|
||||
*/
|
||||
private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null");
|
||||
Assert.notNull(update, "Update must not be null");
|
||||
|
||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||
|
||||
this.models.add(Mono.just(update).map(it -> {
|
||||
if (multi) {
|
||||
return new SourceAwareWriteModelHolder(update,
|
||||
new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options));
|
||||
}
|
||||
return new SourceAwareWriteModelHolder(update,
|
||||
new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options));
|
||||
}));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void maybeEmitEvent(ApplicationEvent event) {
|
||||
bulkOperationContext.publishEvent(event);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UpdateMapper updateMapper() {
|
||||
return bulkOperationContext.updateMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected QueryMapper queryMapper() {
|
||||
return bulkOperationContext.queryMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Optional<? extends MongoPersistentEntity<?>> entity() {
|
||||
return bulkOperationContext.entity();
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||
|
||||
if (holder.model() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
|
||||
return maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
} else if (holder.model() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
|
||||
return maybeInvokeAfterSaveCallback(holder.source(), target);
|
||||
}
|
||||
return Mono.just(holder.source());
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeBeforeConvertCallback(Object value) {
|
||||
return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
private Mono<Object> maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||
return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as
|
||||
* references to {@link QueryMapper} and {@link UpdateMapper}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
record ReactiveBulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
|
||||
@Nullable ReactiveEntityCallbacks entityCallbacks) {
|
||||
|
||||
public boolean skipEntityCallbacks() {
|
||||
return entityCallbacks == null;
|
||||
}
|
||||
|
||||
public boolean skipEventPublishing() {
|
||||
return eventPublisher == null;
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
|
||||
|
||||
if (skipEntityCallbacks()) {
|
||||
return Mono.just(entity);
|
||||
}
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, collectionName);
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
|
||||
String collectionName) {
|
||||
|
||||
if (skipEntityCallbacks()) {
|
||||
return Mono.just(entity);
|
||||
}
|
||||
|
||||
return entityCallbacks.callback(callbackType, entity, document, collectionName);
|
||||
}
|
||||
|
||||
public void publishEvent(ApplicationEvent event) {
|
||||
|
||||
if (skipEventPublishing()) {
|
||||
return;
|
||||
}
|
||||
|
||||
eventPublisher.publishEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
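DefaultReactiveBulkOperations above mirrors the blocking implementation but defers mapping of the registered models to subscription time. The class is package-private, so applications would normally obtain it through the reactive template; purely as an illustration of the constructor and record introduced here (queryMapper, updateMapper and reactiveMongoOperations are assumed to be in scope):

// Sketch only: wiring the new reactive bulk operations by hand.
ReactiveBulkOperationContext context = new ReactiveBulkOperationContext(BulkMode.UNORDERED,
        Optional.empty(), queryMapper, updateMapper, null, null);   // no events, no callbacks

ReactiveBulkOperations bulkOps =
        new DefaultReactiveBulkOperations(reactiveMongoOperations, "people", context);

Mono<BulkWriteResult> result = bulkOps
        .insert(new Document("name", "Walter"))
        .remove(new Query(where("name").is("Gus")))
        .execute();              // models are only materialized and written on subscription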
@@ -1,5 +1,5 @@
/*
* Copyright 2016-2021 the original author or authors.
* Copyright 2016-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ import java.util.Collection;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.index.IndexDefinition;
|
||||
import org.springframework.data.mongodb.core.index.IndexInfo;
|
||||
@@ -29,6 +30,7 @@ import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
|
||||
import com.mongodb.client.model.IndexOptions;
|
||||
|
||||
@@ -76,9 +78,9 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName,
|
||||
QueryMapper queryMapper, Optional<Class<?>> type) {
|
||||
|
||||
Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!");
|
||||
Assert.notNull(collectionName, "Collection must not be null!");
|
||||
Assert.notNull(queryMapper, "QueryMapper must not be null!");
|
||||
Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null");
|
||||
Assert.notNull(collectionName, "Collection must not be null");
|
||||
Assert.notNull(queryMapper, "QueryMapper must not be null");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
@@ -86,10 +88,6 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
|
||||
*/
|
||||
public Mono<String> ensureIndex(final IndexDefinition indexDefinition) {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
@@ -108,6 +106,22 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
}).next();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<Void> alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
|
||||
|
||||
return mongoOperations.execute(db -> {
|
||||
Document indexOptions = new Document("name", name);
|
||||
indexOptions.putAll(options.toDocument());
|
||||
|
||||
return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)))
|
||||
.doOnNext(result -> {
|
||||
if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
|
||||
throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
|
||||
}
|
||||
});
|
||||
}).then();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private MongoPersistentEntity<?> lookupPersistentEntity(String collection) {
|
||||
|
||||
@@ -119,26 +133,14 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2014-2021 the original author or authors.
* Copyright 2014-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -31,7 +31,6 @@ import org.bson.types.ObjectId;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -65,41 +64,29 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
*/
|
||||
public DefaultScriptOperations(MongoOperations mongoOperations) {
|
||||
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null!");
|
||||
Assert.notNull(mongoOperations, "MongoOperations must not be null");
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript)
|
||||
*/
|
||||
@Override
|
||||
public NamedMongoScript register(ExecutableMongoScript script) {
|
||||
return register(new NamedMongoScript(generateScriptName(), script));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript)
|
||||
*/
|
||||
@Override
|
||||
public NamedMongoScript register(NamedMongoScript script) {
|
||||
|
||||
Assert.notNull(script, "Script must not be null!");
|
||||
Assert.notNull(script, "Script must not be null");
|
||||
|
||||
mongoOperations.save(script, SCRIPT_COLLECTION_NAME);
|
||||
return script;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[])
|
||||
*/
|
||||
@Override
|
||||
public Object execute(final ExecutableMongoScript script, final Object... args) {
|
||||
|
||||
Assert.notNull(script, "Script must not be null!");
|
||||
Assert.notNull(script, "Script must not be null");
|
||||
|
||||
return mongoOperations.execute(new DbCallback<Object>() {
|
||||
|
||||
@@ -115,14 +102,10 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[])
|
||||
*/
|
||||
@Override
|
||||
public Object call(final String scriptName, final Object... args) {
|
||||
|
||||
Assert.hasText(scriptName, "ScriptName must not be null or empty!");
|
||||
Assert.hasText(scriptName, "ScriptName must not be null or empty");
|
||||
|
||||
return mongoOperations.execute(new DbCallback<Object>() {
|
||||
|
||||
@@ -135,22 +118,14 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public boolean exists(String scriptName) {
|
||||
|
||||
Assert.hasText(scriptName, "ScriptName must not be null or empty!");
|
||||
Assert.hasText(scriptName, "ScriptName must not be null or empty");
|
||||
|
||||
return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames()
|
||||
*/
|
||||
@Override
|
||||
public Set<String> getScriptNames() {
|
||||
|
||||
@@ -175,7 +150,7 @@ class DefaultScriptOperations implements ScriptOperations {
|
||||
return args;
|
||||
}
|
||||
|
||||
List<Object> convertedValues = new ArrayList<Object>(args.length);
|
||||
List<Object> convertedValues = new ArrayList<>(args.length);
|
||||
|
||||
for (Object arg : args) {
|
||||
convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg)
|
||||
|
||||
@@ -1,5 +1,5 @@
/*
* Copyright 2015-2021 the original author or authors.
* Copyright 2015-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2010-2021 the original author or authors.
* Copyright 2010-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
/*
* Copyright 2021 the original author or authors.
* Copyright 2021-2023 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Some files were not shown because too many files have changed in this diff.