Compare commits
154 Commits
| SHA1 |
|---|
| 7047bb23cf |
| 3876fbfaf3 |
| 76f25ef2d2 |
| d3e9c28375 |
| 6b7ac32ea0 |
| 6004c9a9eb |
| 4d08042966 |
| 5d53099336 |
| 4744065a2e |
| 20ef448094 |
| 4fcb9f20b9 |
| a3081d243b |
| c8a4dea162 |
| 42a4169a25 |
| d4b804eaca |
| fe0688c044 |
| 0eea02a859 |
| 301c01827a |
| 66c2f1f9c8 |
| f7b913535e |
| 6fa1800787 |
| fa574ca5b9 |
| 83d075673b |
| 57dacffd07 |
| 987ba82966 |
| 4f15880d7b |
| fcc3215b73 |
| d76c84326b |
| f5ea80a346 |
| a414e21f42 |
| 5a999bacf8 |
| be714cf702 |
| 157684a563 |
| 0548950bbd |
| 7ecc71299f |
| 0a97fb92af |
| 423efcf33e |
| 9dbea7c120 |
| 69ba8b5ba9 |
| 91815a68c4 |
| 6949e4ad70 |
| f53f6b9308 |
| fb3bda9e94 |
| c9ad0884ec |
| eba0a66e1c |
| 92ace88cac |
| 24a5fcd5f5 |
| 4fff480e17 |
| 51160571fd |
| b5c40e7427 |
| ee712f67db |
| de2a0373bb |
| baa897ff2b |
| 68769004b0 |
| d1015d51b0 |
| bdc662ebe3 |
| 8ee33b2e50 |
| 5f33987741 |
| f7cf23510c |
| 093fa602a8 |
| ac538c6d29 |
| 7b30423356 |
| c700e9d0c5 |
| fcd6187849 |
| 62bffc0cf9 |
| db4f16dd60 |
| c284e4ee23 |
| 4387cd20e2 |
| 8423ae515b |
| c19d457f61 |
| 1119a4a0a6 |
| 162a9abb6b |
| a8943a8a0b |
| dfa1822d14 |
| 48aabfbf56 |
| e1e8684705 |
| 228ee1e315 |
| 52886e1680 |
| d7ec16e14b |
| b5f23eaa4d |
| 3342c75ecf |
| 2ac7fec75c |
| 593f068301 |
| 9e0343097f |
| 3f3eec19fe |
| 46b54da74b |
| f35392f6dd |
| 5e241c6ea5 |
| 1078294e3e |
| e3ec68b07c |
| f93d7e7359 |
| aacd04a42f |
| 59de671387 |
| 54101a4619 |
| e9df28024a |
| 48c6e1eed5 |
| 9cdc79a89a |
| f2bf878fbe |
| 4d8019abca |
| 47864e0cf9 |
| ed83c7625e |
| 0d4b5de2a5 |
| 24e9841beb |
| f130616e68 |
| 32da9f4336 |
| 5b83286da4 |
| f84d9888dd |
| 8a9e120047 |
| 26cccf1f14 |
| bfb9c2869c |
| 705f1b45c8 |
| 198fcbb1a0 |
| a2b3e8562a |
| f088c94548 |
| b8aa26d150 |
| 4a24eb22b3 |
| a70f592b26 |
| a58a3965b7 |
| 3b0d1e0616 |
| f58e462fc8 |
| d133ef19dd |
| 7617099abe |
| cb2fe05f44 |
| 7ce21431a9 |
| 451d996ae0 |
| 391d5101d8 |
| fda07d9999 |
| 2bb8643d1b |
| 8de07fc3ac |
| f774e35af2 |
| 0a95fd9868 |
| 49e6d53641 |
| 7a24dcfdd6 |
| 41d5bba130 |
| 7332a6dfe8 |
| bd673fc8b3 |
| 3d697a1752 |
| d3e4ddc37a |
| 182536981a |
| 929a2d5984 |
| d32c01c92f |
| c52d7a8c4f |
| a6bd0fcea7 |
| 6e3e8210d0 |
| c0718662d5 |
| 62bf12833e |
| d4dc13894a |
| 830fad957e |
| ed41116da1 |
| bc027be665 |
| b98a7063ee |
| cb441d5b6c |
| b789d0fc67 |
| 29c90d65af |
4 changes: .mvn/wrapper/maven-wrapper.properties (vendored)

@@ -1,2 +1,2 @@
-#Tue Jun 13 08:53:53 CEST 2023
+#Mon Feb 20 12:01:58 CET 2023
-distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.2/apache-maven-3.9.2-bin.zip
+distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.0/apache-maven-3.9.0-bin.zip

173 changes: Jenkinsfile (vendored)

@@ -9,7 +9,7 @@ pipeline {

 triggers {
 pollSCM 'H/10 * * * *'
-upstream(upstreamProjects: "spring-data-commons/3.1.x", threshold: hudson.model.Result.SUCCESS)
+upstream(upstreamProjects: "spring-data-commons/2.7.x", threshold: hudson.model.Result.SUCCESS)
 }

 options {
@@ -20,7 +20,64 @@ pipeline {
 stages {
 stage("Docker images") {
 parallel {
-stage('Publish JDK (Java 17) + MongoDB 4.4') {
+stage('Publish JDK (main) + MongoDB 4.0') {
+when {
+anyOf {
+changeset "ci/openjdk8-mongodb-4.0/**"
+changeset "ci/pipeline.properties"
+}
+}
+agent { label 'data' }
+options { timeout(time: 30, unit: 'MINUTES') }
+
+steps {
+script {
+def image = docker.build("springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.0.version']} ci/openjdk8-mongodb-4.0/")
+docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+image.push()
+}
+}
+}
+}
+stage('Publish JDK (main) + MongoDB 4.4') {
+when {
+anyOf {
+changeset "ci/openjdk8-mongodb-4.4/**"
+changeset "ci/pipeline.properties"
+}
+}
+agent { label 'data' }
+options { timeout(time: 30, unit: 'MINUTES') }
+
+steps {
+script {
+def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk8-mongodb-4.4/")
+docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+image.push()
+}
+}
+}
+}
+stage('Publish JDK (main) + MongoDB 5.0') {
+when {
+anyOf {
+changeset "ci/openjdk8-mongodb-5.0/**"
+changeset "ci/pipeline.properties"
+}
+}
+agent { label 'data' }
+options { timeout(time: 30, unit: 'MINUTES') }
+
+steps {
+script {
+def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk8-mongodb-5.0/")
+docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
+image.push()
+}
+}
+}
+}
+stage('Publish JDK (LTS) + MongoDB 4.4') {
 when {
 anyOf {
 changeset "ci/openjdk17-mongodb-4.4/**"
@@ -32,64 +89,7 @@ pipeline {

 steps {
 script {
-def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk17-mongodb-4.4/")
+def image = docker.build("springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}", "--build-arg BASE=${p['docker.java.lts.image']} --build-arg MONGODB=${p['docker.mongodb.4.4.version']} ci/openjdk17-mongodb-4.4/")
-docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
-image.push()
-}
-}
-}
-}
-stage('Publish JDK (Java 17) + MongoDB 5.0') {
-when {
-anyOf {
-changeset "ci/openjdk17-mongodb-5.0/**"
-changeset "ci/pipeline.properties"
-}
-}
-agent { label 'data' }
-options { timeout(time: 30, unit: 'MINUTES') }
-
-steps {
-script {
-def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk17-mongodb-5.0/")
-docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
-image.push()
-}
-}
-}
-}
-stage('Publish JDK (Java 17) + MongoDB 6.0') {
-when {
-anyOf {
-changeset "ci/openjdk17-mongodb-6.0/**"
-changeset "ci/pipeline.properties"
-}
-}
-agent { label 'data' }
-options { timeout(time: 30, unit: 'MINUTES') }
-
-steps {
-script {
-def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk17-mongodb-6.0/")
-docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
-image.push()
-}
-}
-}
-}
-stage('Publish JDK (Java 20) + MongoDB 6.0') {
-when {
-anyOf {
-changeset "ci/openjdk20-mongodb-6.0/**"
-changeset "ci/pipeline.properties"
-}
-}
-agent { label 'data' }
-options { timeout(time: 30, unit: 'MINUTES') }
-
-steps {
-script {
-def image = docker.build("springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.6.0.version']} ci/openjdk20-mongodb-6.0/")
 docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
 image.push()
 }
@@ -116,7 +116,7 @@ pipeline {
 }
 steps {
 script {
-docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
 sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
 sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
 sh 'sleep 10'
@@ -137,8 +137,29 @@ pipeline {
 }
 }
 parallel {
+stage("test: mongodb 4.4 (main)") {
+agent {
+label 'data'
+}
+options { timeout(time: 30, unit: 'MINUTES') }
+environment {
+ARTIFACTORY = credentials("${p['artifactory.credentials']}")
+}
+steps {
+script {
+docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
+sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
+sh 'sleep 10'
+sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+sh 'sleep 15'
+sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
+}
+}
+}
+}

-stage("test: MongoDB 5.0 (main)") {
+stage("test: mongodb 5.0 (main)") {
 agent {
 label 'data'
 }
@@ -160,7 +181,7 @@ pipeline {
 }
 }

-stage("test: MongoDB 6.0 (main)") {
+stage("test: baseline (LTS)") {
 agent {
 label 'data'
 }
@@ -170,33 +191,11 @@ pipeline {
 }
 steps {
 script {
-docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
+docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-4.4:${p['java.lts.tag']}").inside(p['docker.java.inside.basic']) {
 sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
 sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
 sh 'sleep 10'
-sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
+sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
-sh 'sleep 15'
-sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
-}
-}
-}
-}
-
-stage("test: MongoDB 6.0 (next)") {
-agent {
-label 'data'
-}
-options { timeout(time: 30, unit: 'MINUTES') }
-environment {
-ARTIFACTORY = credentials("${p['artifactory.credentials']}")
-}
-steps {
-script {
-docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-6.0:${p['java.next.tag']}").inside(p['docker.java.inside.basic']) {
-sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
-sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
-sh 'sleep 10'
-sh 'mongosh --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
 sh 'sleep 15'
 sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
 }
 }

149 changes: README.adoc

@@ -93,11 +93,142 @@ and declare the appropriate dependency version.
 </repository>
 ----

-[[upgrading]]
+== Upgrading from 2.x
-== Upgrading

-Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki].
+The 4.0 MongoDB Java Driver does no longer support certain features that have already been deprecated in one of the last minor versions.
-Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to.
+Some of the changes affect the initial setup configuration as well as compile/runtime features. We summarized the most typical changes one might encounter.

+=== XML Namespace
+
+.Changed XML Namespace Elements and Attributes:
+|===
+| Element / Attribute | 2.x | 3.x
+
+| `<mongo:mongo-client />`
+| Used to create a `com.mongodb.MongoClient`
+| Now exposes a `com.mongodb.client.MongoClient`
+
+| `<mongo:mongo-client replica-set="..." />`
+| Was a comma delimited list of replica set members (host/port)
+| Now defines the replica set name. +
+Use `<mongo:client-settings cluster-hosts="..." />` instead
+
+| `<mongo:db-factory writeConcern="..." />`
+| NONE, NORMAL, SAFE, FSYNC_SAFE, REPLICAS_SAFE, MAJORITY
+| W1, W2, W3, UNACKNOWLEDGED, ACKNOWLEDGED, JOURNALED, MAJORITY
+|===
+
+.Removed XML Namespace Elements and Attributes:
+|===
+| Element / Attribute | Replacement in 3.x | Comment
+
+| `<mongo:db-factory mongo-ref="..." />`
+| `<mongo:db-factory mongo-client-ref="..." />`
+| Referencing a `com.mongodb.client.MongoClient`.
+
+| `<mongo:mongo-client credentials="..." />`
+| `<mongo:mongo-client credential="..." />`
+| Single authentication data instead of list.
+
+| `<mongo:client-options />`
+| `<mongo:client-settings />`
+| See `com.mongodb.MongoClientSettings` for details.
+|===
+
+.New XML Namespace Elements and Attributes:
+|===
+| Element | Comment
+
+| `<mongo:db-factory mongo-client-ref="..." />`
+| Replacement for `<mongo:db-factory mongo-ref="..." />`
+
+| `<mongo:db-factory connection-string="..." />`
+| Replacement for `uri` and `client-uri`.
+
+| `<mongo:mongo-client connection-string="..." />`
+| Replacement for `uri` and `client-uri`.
+
+| `<mongo:client-settings />`
+| Namespace element for `com.mongodb.MongoClientSettings`.
+
+|===
+
+=== Java Configuration
+
+.Java API changes
+|===
+| Type | Comment
+
+| `MongoClientFactoryBean`
+| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
+Uses `MongoClientSettings` instead of `MongoClientOptions`.
+
+| `MongoDataIntegrityViolationException`
+| Uses `WriteConcernResult` instead of `WriteResult`.
+
+| `BulkOperationException`
+| Uses `MongoBulkWriteException` and `com.mongodb.bulk.BulkWriteError` instead of `BulkWriteException` and `com.mongodb.BulkWriteError`
+
+| `ReactiveMongoClientFactoryBean`
+| Uses `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
+
+| `ReactiveMongoClientSettingsFactoryBean`
+| Now produces `com.mongodb.MongoClientSettings` instead of `com.mongodb.async.client.MongoClientSettings`
+|===
+
+.Removed Java API:
+|===
+| 2.x | Replacement in 3.x | Comment
+
+| `MongoClientOptionsFactoryBean`
+| `MongoClientSettingsFactoryBean`
+| Creating a `com.mongodb.MongoClientSettings`.
+
+| `AbstractMongoConfiguration`
+| `AbstractMongoClientConfiguration` +
+(Available since 2.1)
+| Using `com.mongodb.client.MongoClient`.
+
+| `MongoDbFactory#getLegacyDb()`
+| -
+| -
+
+| `SimpleMongoDbFactory`
+| `SimpleMongoClientDbFactory` +
+(Available since 2.1)
+|
+
+| `MapReduceOptions#getOutputType()`
+| `MapReduceOptions#getMapReduceAction()`
+| Returns `MapReduceAction` instead of `MapReduceCommand.OutputType`.
+
+| `Meta\|Query` maxScan & snapshot
+|
+|
+|===
+
+=== Other Changes
+
+==== UUID Types
+
+The MongoDB UUID representation can now be configured with different formats.
+This has to be done via `MongoClientSettings` as shown in the snippet below.
+
+.UUID Codec Configuration
+====
+[source,java]
+----
+static class Config extends AbstractMongoClientConfiguration {
+
+@Override
+public void configureClientSettings(MongoClientSettings.Builder builder) {
+builder.uuidRepresentation(UuidRepresentation.STANDARD);
+}
+
+// ...
+}
+----
+====
+
 [[getting-help]]
 == Getting Help
@@ -146,12 +277,12 @@ and accessible from Maven using the Maven configuration noted <<maven-configurat
 NOTE: Configuration for Gradle is similar to Maven.

 The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
-Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
+Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
-to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
+to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
 to build a reactive one.

 However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
-and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
+and minimally, JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).

 In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
 and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
@@ -182,7 +313,7 @@ To initialize the replica set, start a mongo client:
 [source,bash]
 ----
 $ $MONGODB_HOME/bin/mongo
-MongoDB server version: 6.0.0
+MongoDB server version: 5.0.0
 ...
 ----

@@ -210,7 +341,7 @@ Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw
 $ ./mvnw clean install
 ----

-If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above].
+If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].

 _Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
 the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._

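The upgrade table above replaces `MongoClientOptions` and the XML `replica-set` attribute with `MongoClientSettings` and `<mongo:client-settings cluster-hosts="..." />`. As a rough, non-normative illustration of the programmatic side (the host, port and the wrapper class below are placeholders, not taken from this repository):

[source,java]
----
import java.util.List;

import org.bson.UuidRepresentation;

import com.mongodb.MongoClientSettings;
import com.mongodb.ServerAddress;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

class ClientSettingsExample {

	static MongoClient createClient() {

		// Cluster hosts replace the old comma-delimited replica-set member list;
		// the UUID representation now has to be chosen explicitly.
		MongoClientSettings settings = MongoClientSettings.builder()
				.applyToClusterSettings(it -> it.hosts(List.of(new ServerAddress("127.0.0.1", 27017))))
				.uuidRepresentation(UuidRepresentation.STANDARD)
				.build();

		return MongoClients.create(settings);
	}
}
----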
@@ -9,13 +9,11 @@ ENV DEBIAN_FRONTEND=noninteractive
 RUN set -eux; \
 sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
 sed -i -e 's/http/https/g' /etc/apt/sources.list && \
-apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
-# MongoDB 6.0 release signing key
+apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 && \
-wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
+echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list && \
-# Needed when MongoDB creates a 6.0 folder.
-echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
 echo ${TZ} > /etc/timezone

 RUN apt-get update && \

22 changes: ci/openjdk8-mongodb-4.0/Dockerfile (normal file)

@@ -0,0 +1,22 @@
+ARG BASE
+FROM ${BASE}
+# Any ARG statements before FROM are cleared.
+ARG MONGODB
+
+ENV TZ=Etc/UTC
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN set -eux; \
+sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
+sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
+sed -i -e 's/http/https/g' /etc/apt/sources.list && \
+apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
+apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 && \
+echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list && \
+echo ${TZ} > /etc/timezone
+
+RUN apt-get update && \
+apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+apt-get clean && \
+rm -rf /var/lib/apt/lists/*

@@ -9,16 +9,16 @@ ENV DEBIAN_FRONTEND=noninteractive
 RUN set -eux; \
 sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
 sed -i -e 's/http/https/g' /etc/apt/sources.list && \
-apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
+apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 && \
-# MongoDB 6.0 release signing key
+apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 && \
-wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | apt-key add - && \
+echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list && \
-# Needed when MongoDB creates a 6.0 folder.
-echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/6.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
 echo ${TZ} > /etc/timezone

 RUN apt-get update && \
+ln -T /bin/true /usr/bin/systemctl && \
 apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \
+rm /usr/bin/systemctl && \
 apt-get clean && \
 rm -rf /var/lib/apt/lists/*

@@ -9,7 +9,7 @@ ENV DEBIAN_FRONTEND=noninteractive
 RUN set -eux; \
 sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \
 sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \
 sed -i -e 's/http/https/g' /etc/apt/sources.list && \
 apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \
 # MongoDB 5.0 release signing key

@@ -1,15 +1,17 @@
 # Java versions
-java.main.tag=17.0.6_10-jdk-focal
+java.main.tag=8u362-b09-jdk-focal
-java.next.tag=20-jdk-jammy
+java.next.tag=11.0.18_10-jdk-focal
+java.lts.tag=17.0.6_10-jdk-focal

 # Docker container images - standard
 docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
 docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
+docker.java.lts.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.lts.tag}

 # Supported versions of MongoDB
+docker.mongodb.4.0.version=4.0.28
 docker.mongodb.4.4.version=4.4.18
 docker.mongodb.5.0.version=5.0.14
-docker.mongodb.6.0.version=6.0.4

 # Supported versions of Redis
 docker.redis.6.version=6.2.10

35 changes: pom.xml

@@ -5,7 +5,7 @@

 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>4.1.1</version>
+<version>3.4.10</version>
 <packaging>pom</packaging>

 <name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
 <parent>
 <groupId>org.springframework.data.build</groupId>
 <artifactId>spring-data-parent</artifactId>
-<version>3.1.1</version>
+<version>2.7.10</version>
 </parent>

 <modules>
@@ -26,8 +26,8 @@
 <properties>
 <project.type>multi</project.type>
 <dist.id>spring-data-mongodb</dist.id>
-<springdata.commons>3.1.1</springdata.commons>
+<springdata.commons>2.7.10</springdata.commons>
-<mongo>4.9.1</mongo>
+<mongo>4.6.1</mongo>
 <mongo.reactivestreams>${mongo}</mongo.reactivestreams>
 <jmh.version>1.19</jmh.version>
 </properties>
@@ -144,8 +144,31 @@
 </dependencies>

 <repositories>
+<repository>
+<id>spring-libs-release</id>
+<url>https://repo.spring.io/libs-release</url>
+</repository>
+<repository>
+<id>sonatype-libs-snapshot</id>
+<url>https://oss.sonatype.org/content/repositories/snapshots</url>
+<releases>
+<enabled>false</enabled>
+</releases>
+<snapshots>
+<enabled>true</enabled>
+</snapshots>
+</repository>
 </repositories>

+<pluginRepositories>
+<pluginRepository>
+<id>spring-plugins-release</id>
+<url>https://repo.spring.io/plugins-release</url>
+</pluginRepository>
+<pluginRepository>
+<id>spring-libs-milestone</id>
+<url>https://repo.spring.io/libs-milestone</url>
+</pluginRepository>
+</pluginRepositories>
+
 </project>

@@ -7,7 +7,7 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>4.1.1</version>
+<version>3.4.10</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -322,7 +322,7 @@ public class AbstractMicrobenchmark {
 try {
 ResultsWriter.forUri(uri).write(results);
 } catch (Exception e) {
-System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e));
+System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e));
 }
 }
 }

@@ -96,14 +96,15 @@ class MongoResultsWriter implements ResultsWriter {
 for (Object key : doc.keySet()) {

 Object value = doc.get(key);
-if (value instanceof Document document) {
+if (value instanceof Document) {
-value = fixDocumentKeys(document);
+value = fixDocumentKeys((Document) value);
-} else if (value instanceof BasicDBObject basicDBObject) {
+} else if (value instanceof BasicDBObject) {
-value = fixDocumentKeys(new Document(basicDBObject));
+value = fixDocumentKeys(new Document((BasicDBObject) value));
 }

-if (key instanceof String newKey) {
+if (key instanceof String) {

+String newKey = (String) key;
 if (newKey.contains(".")) {
 newKey = newKey.replace('.', ',');
 }

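The hunk above shows the 3.4.x branch replacing Java 16 `instanceof` pattern matching with explicit casts. A minimal, self-contained sketch of the two equivalent styles (the `describe` methods are illustrative only and not part of the benchmark code):

[source,java]
----
import org.bson.Document;

class InstanceofStyles {

	// Style on main: the pattern variable is bound directly in the instanceof check.
	static String describe(Object value) {
		if (value instanceof Document document) {
			return "Document with " + document.size() + " keys";
		}
		return String.valueOf(value);
	}

	// Equivalent pre-Java-16 style used on 3.4.x: test first, then cast.
	static String describeLegacy(Object value) {
		if (value instanceof Document) {
			Document document = (Document) value;
			return "Document with " + document.size() + " keys";
		}
		return String.valueOf(value);
	}
}
----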
@@ -1,7 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">

 <modelVersion>4.0.0</modelVersion>

@@ -15,18 +14,13 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>4.1.1</version>
+<version>3.4.10</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

 <properties>
 <project.root>${basedir}/..</project.root>
 <dist.key>SDMONGO</dist.key>
-
-<!-- Observability -->
-<micrometer-docs-generator.inputPath>${maven.multiModuleProjectDirectory}/spring-data-mongodb/</micrometer-docs-generator.inputPath>
-<micrometer-docs-generator.inclusionPattern>.*</micrometer-docs-generator.inclusionPattern>
-<micrometer-docs-generator.outputPath>${maven.multiModuleProjectDirectory}/target/</micrometer-docs-generator.outputPath>
 </properties>

 <build>
@@ -35,43 +29,12 @@
 <groupId>org.apache.maven.plugins</groupId>
 <artifactId>maven-assembly-plugin</artifactId>
 </plugin>
-<plugin>
-<groupId>org.codehaus.mojo</groupId>
-<artifactId>exec-maven-plugin</artifactId>
-<executions>
-<execution>
-<id>generate-docs</id>
-<phase>generate-resources</phase>
-<goals>
-<goal>java</goal>
-</goals>
-<configuration>
-<mainClass>io.micrometer.docs.DocsGeneratorCommand</mainClass>
-<includePluginDependencies>true</includePluginDependencies>
-<arguments>
-<argument>${micrometer-docs-generator.inputPath}</argument>
-<argument>${micrometer-docs-generator.inclusionPattern}</argument>
-<argument>${micrometer-docs-generator.outputPath}</argument>
-</arguments>
-</configuration>
-</execution>
-</executions>
-<dependencies>
-<dependency>
-<groupId>io.micrometer</groupId>
-<artifactId>micrometer-docs-generator</artifactId>
-<version>1.0.1</version>
-<type>jar</type>
-</dependency>
-</dependencies>
-</plugin>
 <plugin>
 <groupId>org.asciidoctor</groupId>
 <artifactId>asciidoctor-maven-plugin</artifactId>
 <configuration>
 <attributes>
-<mongo-reactivestreams>${mongo.reactivestreams}
+<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
-</mongo-reactivestreams>
 <reactor>${reactor}</reactor>
 </attributes>
 </configuration>

@@ -1,7 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">

 <modelVersion>4.0.0</modelVersion>

@@ -13,7 +11,7 @@
 <parent>
 <groupId>org.springframework.data</groupId>
 <artifactId>spring-data-mongodb-parent</artifactId>
-<version>4.1.1</version>
+<version>3.4.10</version>
 <relativePath>../pom.xml</relativePath>
 </parent>

@@ -112,13 +110,6 @@
 <optional>true</optional>
 </dependency>

-<dependency>
-<groupId>org.mongodb</groupId>
-<artifactId>mongodb-crypt</artifactId>
-<version>1.6.1</version>
-<optional>true</optional>
-</dependency>
-
 <dependency>
 <groupId>io.projectreactor</groupId>
 <artifactId>reactor-core</artifactId>
@@ -131,6 +122,27 @@
 <optional>true</optional>
 </dependency>

+<dependency>
+<groupId>io.reactivex</groupId>
+<artifactId>rxjava</artifactId>
+<version>${rxjava}</version>
+<optional>true</optional>
+</dependency>
+
+<dependency>
+<groupId>io.reactivex</groupId>
+<artifactId>rxjava-reactive-streams</artifactId>
+<version>${rxjava-reactive-streams}</version>
+<optional>true</optional>
+</dependency>
+
+<dependency>
+<groupId>io.reactivex.rxjava2</groupId>
+<artifactId>rxjava</artifactId>
+<version>${rxjava2}</version>
+<optional>true</optional>
+</dependency>
+
 <dependency>
 <groupId>io.reactivex.rxjava3</groupId>
 <artifactId>rxjava</artifactId>
@@ -140,6 +152,12 @@

 <!-- CDI -->
 <!-- Dependency order required to build against CDI 1.0 and test with CDI 2.0 -->
+<dependency>
+<groupId>org.apache.geronimo.specs</groupId>
+<artifactId>geronimo-jcdi_2.0_spec</artifactId>
+<version>1.0.1</version>
+<scope>test</scope>
+</dependency>

 <dependency>
 <groupId>javax.interceptor</groupId>
@@ -149,48 +167,31 @@
 </dependency>

 <dependency>
-<groupId>jakarta.enterprise</groupId>
+<groupId>javax.enterprise</groupId>
-<artifactId>jakarta.enterprise.cdi-api</artifactId>
+<artifactId>cdi-api</artifactId>
 <version>${cdi}</version>
 <scope>provided</scope>
 <optional>true</optional>
 </dependency>

 <dependency>
-<groupId>jakarta.annotation</groupId>
+<groupId>javax.annotation</groupId>
-<artifactId>jakarta.annotation-api</artifactId>
+<artifactId>javax.annotation-api</artifactId>
-<version>${jakarta-annotation-api}</version>
+<version>${javax-annotation-api}</version>
 <scope>test</scope>
 </dependency>

 <dependency>
 <groupId>org.apache.openwebbeans</groupId>
 <artifactId>openwebbeans-se</artifactId>
-<classifier>jakarta</classifier>
-<version>${webbeans}</version>
-<scope>test</scope>
-</dependency>
-
-<dependency>
-<groupId>org.apache.openwebbeans</groupId>
-<artifactId>openwebbeans-spi</artifactId>
-<classifier>jakarta</classifier>
-<version>${webbeans}</version>
-<scope>test</scope>
-</dependency>
-
-<dependency>
-<groupId>org.apache.openwebbeans</groupId>
-<artifactId>openwebbeans-impl</artifactId>
-<classifier>jakarta</classifier>
 <version>${webbeans}</version>
 <scope>test</scope>
 </dependency>

 <!-- JSR 303 Validation -->
 <dependency>
-<groupId>jakarta.validation</groupId>
+<groupId>javax.validation</groupId>
-<artifactId>jakarta.validation-api</artifactId>
+<artifactId>validation-api</artifactId>
 <version>${validation}</version>
 <optional>true</optional>
 </dependency>
@@ -203,37 +204,30 @@
 </dependency>

 <dependency>
-<groupId>io.micrometer</groupId>
+<groupId>org.hibernate</groupId>
-<artifactId>micrometer-observation</artifactId>
-<optional>true</optional>
-</dependency>
-
-<dependency>
-<groupId>io.micrometer</groupId>
-<artifactId>micrometer-tracing</artifactId>
-<optional>true</optional>
-</dependency>
-
-<dependency>
-<groupId>org.hibernate.validator</groupId>
 <artifactId>hibernate-validator</artifactId>
-<version>7.0.1.Final</version>
+<version>5.4.3.Final</version>
 <scope>test</scope>
 </dependency>

 <dependency>
-<groupId>jakarta.el</groupId>
+<groupId>org.glassfish</groupId>
-<artifactId>jakarta.el-api</artifactId>
+<artifactId>javax.el</artifactId>
-<version>4.0.0</version>
+<version>3.0.1-b11</version>
-<scope>provided</scope>
+<scope>test</scope>
-<optional>true</optional>
 </dependency>

 <dependency>
-<groupId>org.glassfish</groupId>
+<groupId>joda-time</groupId>
-<artifactId>jakarta.el</artifactId>
+<artifactId>joda-time</artifactId>
-<version>4.0.2</version>
+<version>${jodatime}</version>
-<scope>provided</scope>
+<scope>test</scope>
+</dependency>
+
+<dependency>
+<groupId>org.threeten</groupId>
+<artifactId>threetenbp</artifactId>
+<version>${threetenbp}</version>
 <optional>true</optional>
 </dependency>

@@ -278,9 +272,9 @@
 </dependency>

 <dependency>
-<groupId>jakarta.transaction</groupId>
+<groupId>javax.transaction</groupId>
-<artifactId>jakarta.transaction-api</artifactId>
+<artifactId>jta</artifactId>
-<version>2.0.0</version>
+<version>1.1</version>
 <scope>test</scope>
 </dependency>

@@ -316,29 +310,6 @@
 <scope>test</scope>
 </dependency>

-<dependency>
-<groupId>io.micrometer</groupId>
-<artifactId>micrometer-test</artifactId>
-<scope>test</scope>
-<exclusions>
-<exclusion>
-<groupId>com.github.tomakehurst</groupId>
-<artifactId>wiremock-jre8-standalone</artifactId>
-</exclusion>
-</exclusions>
-</dependency>
-<dependency>
-<groupId>io.micrometer</groupId>
-<artifactId>micrometer-tracing-test</artifactId>
-<scope>test</scope>
-</dependency>
-
-<dependency>
-<groupId>io.micrometer</groupId>
-<artifactId>micrometer-tracing-integration-test</artifactId>
-<scope>test</scope>
-</dependency>
-
 <!-- jMolecules -->

 <dependency>
@@ -372,11 +343,8 @@
 <goal>test-process</goal>
 </goals>
 <configuration>
-<outputDirectory>target/generated-test-sources
+<outputDirectory>target/generated-test-sources</outputDirectory>
-</outputDirectory>
+<processor>org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor</processor>
-<processor>
-org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor
-</processor>
 </configuration>
 </execution>
 </executions>
@@ -396,9 +364,7 @@
 <exclude>**/ReactivePerformanceTests.java</exclude>
 </excludes>
 <systemPropertyVariables>
-<java.util.logging.config.file>
+<java.util.logging.config.file>src/test/resources/logging.properties</java.util.logging.config.file>
-src/test/resources/logging.properties
-</java.util.logging.config.file>
 <reactor.trace.cancel>true</reactor.trace.cancel>
 </systemPropertyVariables>
 </configuration>

@@ -103,11 +103,19 @@ public class BindableMongoExpression implements MongoExpression {
 return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
 }

+/*
+* (non-Javadoc)
+* @see org.springframework.data.mongodb.MongoExpression#toDocument()
+*/
 @Override
 public Document toDocument() {
 return target.get();
 }

+/*
+* (non-Javadoc)
+* @see java.lang.Object#toString()
+*/
 @Override
 public String toString() {
 return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="

@@ -62,7 +62,7 @@ public interface CodecRegistryProvider {
 */
 default <T> Optional<Codec<T>> getCodecFor(Class<T> type) {

-Assert.notNull(type, "Type must not be null");
+Assert.notNull(type, "Type must not be null!");

 try {
 return Optional.of(getCodecRegistry().get(type));

@@ -102,7 +102,7 @@ public class MongoDatabaseUtils {
 private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory,
 SessionSynchronization sessionSynchronization) {

-Assert.notNull(factory, "Factory must not be null");
+Assert.notNull(factory, "Factory must not be null!");

 if (sessionSynchronization == SessionSynchronization.NEVER
 || !TransactionSynchronizationManager.isSynchronizationActive()) {
@@ -193,11 +193,19 @@ public class MongoDatabaseUtils {
 this.resourceHolder = resourceHolder;
 }

+/*
+* (non-Javadoc)
+* @see org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion()
+*/
 @Override
 protected boolean shouldReleaseBeforeCompletion() {
 return false;
 }

+/*
+* (non-Javadoc)
+* @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object)
+*/
 @Override
 protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) {

@@ -206,6 +214,10 @@ public class MongoDatabaseUtils {
 }
 }

+/*
+* (non-Javadoc)
+* @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int)
+*/
 @Override
 public void afterCompletion(int status) {

@@ -216,6 +228,10 @@ public class MongoDatabaseUtils {
 super.afterCompletion(status);
 }

+/*
+* (non-Javadoc)
+* @see org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object)
+*/
 @Override
 protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) {

@@ -0,0 +1,57 @@
+ /*
+ * Copyright 2011-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package org.springframework.data.mongodb;
+
+ import org.springframework.dao.DataAccessException;
+
+ import com.mongodb.client.MongoDatabase;
+
+ /**
+ * Interface for factories creating {@link MongoDatabase} instances.
+ *
+ * @author Mark Pollack
+ * @author Thomas Darimont
+ * @author Christoph Strobl
+ * @deprecated since 3.0, use {@link MongoDatabaseFactory} instead.
+ */
+ @Deprecated
+ public interface MongoDbFactory extends MongoDatabaseFactory {
+
+ /**
+ * Creates a default {@link MongoDatabase} instance.
+ *
+ * @return never {@literal null}.
+ * @throws DataAccessException
+ * @deprecated since 3.0. Use {@link #getMongoDatabase()} instead.
+ */
+ @Deprecated
+ default MongoDatabase getDb() throws DataAccessException {
+ return getMongoDatabase();
+ }
+
+ /**
+ * Obtain a {@link MongoDatabase} instance to access the database with the given name.
+ *
+ * @param dbName must not be {@literal null} or empty.
+ * @return never {@literal null}.
+ * @throws DataAccessException
+ * @deprecated since 3.0. Use {@link #getMongoDatabase(String)} instead.
+ */
+ @Deprecated
+ default MongoDatabase getDb(String dbName) throws DataAccessException {
+ return getMongoDatabase(dbName);
+ }
+ }
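Note on the added interface above: the deprecated MongoDbFactory only layers the legacy getDb(...) methods on top of MongoDatabaseFactory, and the defaults simply delegate to getMongoDatabase(...). A minimal migration sketch, assuming the stock SimpleMongoClientDatabaseFactory implementation and a local mongodb://localhost:27017 instance (both assumptions for illustration):

import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

class MongoDbFactoryMigration {

	public static void main(String[] args) {

		// Assumed setup: a locally running MongoDB instance and the stock factory implementation.
		MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(
				MongoClients.create("mongodb://localhost:27017"), "test");

		// Deprecated style would be factory.getDb("orders") on a MongoDbFactory;
		// the @deprecated notes above point to getMongoDatabase(String) instead.
		MongoDatabase database = factory.getMongoDatabase("orders");
		System.out.println(database.getName());
	}
}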
@@ -1,81 +0,0 @@
- /*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- package org.springframework.data.mongodb;
-
- import java.util.Arrays;
- import java.util.function.Consumer;
-
- import org.springframework.data.domain.ManagedTypes;
-
- /**
- * @author Christoph Strobl
- * @since 4.0
- */
- public final class MongoManagedTypes implements ManagedTypes {
-
- private final ManagedTypes delegate;
-
- private MongoManagedTypes(ManagedTypes types) {
- this.delegate = types;
- }
-
- /**
- * Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}.
- *
- * @param managedTypes
- * @return
- */
- public static MongoManagedTypes from(ManagedTypes managedTypes) {
- return new MongoManagedTypes(managedTypes);
- }
-
- /**
- * Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}.
- *
- * @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}.
- * @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}.
- */
- public static MongoManagedTypes from(Class<?>... types) {
- return fromIterable(Arrays.asList(types));
- }
-
- /**
- * Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of
- * {@link Class types}.
- *
- * @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be
- * {@literal null}.
- * @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class
- * types}.
- */
- public static MongoManagedTypes fromIterable(Iterable<? extends Class<?>> types) {
- return from(ManagedTypes.fromIterable(types));
- }
-
- /**
- * Factory method to return an empty {@link MongoManagedTypes} object.
- *
- * @return an empty {@link MongoManagedTypes} object.
- */
- public static MongoManagedTypes empty() {
- return from(ManagedTypes.empty());
- }
-
- @Override
- public void forEach(Consumer<Class<?>> action) {
- delegate.forEach(action);
- }
- }
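For reference on the file removed above: MongoManagedTypes is a thin wrapper around ManagedTypes, so pre-registering entity types is a one-liner. A hedged sketch, with Person and Order standing in as hypothetical mapped classes:

import org.springframework.data.mongodb.MongoManagedTypes;

class ManagedTypesExample {

	// Hypothetical mapped entities used purely for illustration.
	static class Person {}
	static class Order {}

	public static void main(String[] args) {

		// Wrap an explicit list of types ...
		MongoManagedTypes types = MongoManagedTypes.from(Person.class, Order.class);

		// ... and iterate them, as the mapping context would during initialization.
		types.forEach(type -> System.out.println("managed type: " + type.getName()));

		// An empty holder is available when no types should be pre-registered.
		MongoManagedTypes none = MongoManagedTypes.empty();
		none.forEach(type -> System.out.println("never printed"));
	}
}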
@@ -68,7 +68,7 @@ class MongoResourceHolder extends ResourceHolderSupport {
ClientSession session = getSession();

if (session == null) {
- throw new IllegalStateException("No session available");
+ throw new IllegalStateException("No session available!");
}

return session;
@@ -100,12 +100,16 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
*/
public MongoTransactionManager(MongoDatabaseFactory dbFactory, @Nullable TransactionOptions options) {

- Assert.notNull(dbFactory, "DbFactory must not be null");
+ Assert.notNull(dbFactory, "DbFactory must not be null!");

this.dbFactory = dbFactory;
this.options = options;
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction()
+ */
@Override
protected Object doGetTransaction() throws TransactionException {

@@ -114,11 +118,19 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
return new MongoTransactionObject(resourceHolder);
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object)
+ */
@Override
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
return extractMongoTransaction(transaction).hasResourceHolder();
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition)
+ */
@Override
protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException {

@@ -148,6 +160,10 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder);
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object)
+ */
@Override
protected Object doSuspend(Object transaction) throws TransactionException {

@@ -157,11 +173,19 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory());
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object)
+ */
@Override
protected void doResume(@Nullable Object transaction, Object suspendedResources) {
TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources);
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus)
+ */
@Override
protected final void doCommit(DefaultTransactionStatus status) throws TransactionException {

@@ -212,6 +236,10 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
transactionObject.commitTransaction();
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus)
+ */
@Override
protected void doRollback(DefaultTransactionStatus status) throws TransactionException {

@@ -231,6 +259,10 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
}
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus)
+ */
@Override
protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException {

@@ -238,6 +270,10 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
transactionObject.getRequiredResourceHolder().setRollbackOnly();
}

+ /*
+ * (non-Javadoc)
+ * org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object)
+ */
@Override
protected void doCleanupAfterCompletion(Object transaction) {

@@ -266,7 +302,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
*/
public void setDbFactory(MongoDatabaseFactory dbFactory) {

- Assert.notNull(dbFactory, "DbFactory must not be null");
+ Assert.notNull(dbFactory, "DbFactory must not be null!");
this.dbFactory = dbFactory;
}

@@ -289,11 +325,19 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
return dbFactory;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory()
+ */
@Override
public MongoDatabaseFactory getResourceFactory() {
return getRequiredDbFactory();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
+ */
@Override
public void afterPropertiesSet() {
getRequiredDbFactory();

@@ -315,7 +359,7 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
private MongoDatabaseFactory getRequiredDbFactory() {

Assert.state(dbFactory != null,
- "MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required");
+ "MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? It's required.");

return dbFactory;
}

@@ -450,22 +494,30 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager

private MongoResourceHolder getRequiredResourceHolder() {

- Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O");
+ Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. o_O");
return resourceHolder;
}

private ClientSession getRequiredSession() {

ClientSession session = getSession();
- Assert.state(session != null, "A Session is required but it turned out to be null");
+ Assert.state(session != null, "A Session is required but it turned out to be null.");
return session;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
+ */
@Override
public boolean isRollbackOnly() {
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.support.SmartTransactionObject#flush()
+ */
@Override
public void flush() {
TransactionSynchronizationUtils.triggerFlush();
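The MongoTransactionManager hunks above mostly reintroduce (non-Javadoc) markers and the "!"-suffixed assertion messages; the manager's contract is unchanged. A minimal configuration sketch, assuming a database named "test" and a local connection string (both illustrative):

import com.mongodb.client.MongoClients;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;
import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory;

@Configuration
class TransactionConfig {

	@Bean
	MongoDatabaseFactory mongoDbFactory() {
		// Assumed connection details; replace with your own.
		return new SimpleMongoClientDatabaseFactory(MongoClients.create("mongodb://localhost:27017"), "test");
	}

	@Bean
	MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {
		// The factory is mandatory; afterPropertiesSet() shown above fails fast when it is missing.
		return new MongoTransactionManager(dbFactory);
	}
}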
@@ -136,7 +136,7 @@ public class ReactiveMongoDatabaseUtils {
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
SessionSynchronization sessionSynchronization) {

- Assert.notNull(factory, "DatabaseFactory must not be null");
+ Assert.notNull(factory, "DatabaseFactory must not be null!");

if (sessionSynchronization == SessionSynchronization.NEVER) {
return getMongoDatabaseOrDefault(dbName, factory);

@@ -214,11 +214,19 @@ public class ReactiveMongoDatabaseUtils {
this.resourceHolder = resourceHolder;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
+ */
@Override
protected boolean shouldReleaseBeforeCompletion() {
return false;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
+ */
@Override
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {

@@ -229,6 +237,10 @@ public class ReactiveMongoDatabaseUtils {
return Mono.empty();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
+ */
@Override
public Mono<Void> afterCompletion(int status) {

@@ -244,6 +256,10 @@ public class ReactiveMongoDatabaseUtils {
});
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
+ */
@Override
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
@@ -104,12 +104,16 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
@Nullable TransactionOptions options) {

- Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
+ Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");

this.databaseFactory = databaseFactory;
this.options = options;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
+ */
@Override
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
throws TransactionException {

@@ -119,11 +123,19 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
return new ReactiveMongoTransactionObject(resourceHolder);
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
+ */
@Override
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
return extractMongoTransaction(transaction).hasResourceHolder();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
+ */
@Override
protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
TransactionDefinition definition) throws TransactionException {

@@ -163,6 +175,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
});
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
+ */
@Override
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
throws TransactionException {

@@ -176,6 +192,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
});
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
+ */
@Override
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
Object suspendedResources) {

@@ -183,6 +203,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
+ */
@Override
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) throws TransactionException {

@@ -219,6 +243,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
return transactionObject.commitTransaction();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
+ */
@Override
protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) {

@@ -240,6 +268,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
});
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
+ */
@Override
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
GenericReactiveTransaction status) throws TransactionException {

@@ -250,6 +282,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
});
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
+ */
@Override
protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
Object transaction) {

@@ -281,7 +317,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
*/
public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {

- Assert.notNull(databaseFactory, "DatabaseFactory must not be null");
+ Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
this.databaseFactory = databaseFactory;
}

@@ -304,6 +340,10 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
return databaseFactory;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
+ */
@Override
public void afterPropertiesSet() {
getRequiredDatabaseFactory();

@@ -323,7 +363,7 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {

Assert.state(databaseFactory != null,
- "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required");
+ "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");

return databaseFactory;
}

@@ -458,22 +498,30 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction

private ReactiveMongoResourceHolder getRequiredResourceHolder() {

- Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O");
+ Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
return resourceHolder;
}

private ClientSession getRequiredSession() {

ClientSession session = getSession();
- Assert.state(session != null, "A Session is required but it turned out to be null");
+ Assert.state(session != null, "A Session is required but it turned out to be null.");
return session;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
+ */
@Override
public boolean isRollbackOnly() {
return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.transaction.support.SmartTransactionObject#flush()
+ */
@Override
public void flush() {
throw new UnsupportedOperationException("flush() not supported");
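The reactive manager mirrors its blocking counterpart. A minimal wiring sketch, assuming a SimpleReactiveMongoDatabaseFactory and illustrative connection details:

import com.mongodb.reactivestreams.client.MongoClients;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoTransactionManager;
import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory;

@Configuration
class ReactiveTransactionConfig {

	@Bean
	ReactiveMongoDatabaseFactory reactiveMongoDbFactory() {
		// Assumed connection details; replace with your own.
		return new SimpleReactiveMongoDatabaseFactory(MongoClients.create("mongodb://localhost:27017"), "test");
	}

	@Bean
	ReactiveMongoTransactionManager reactiveTransactionManager(ReactiveMongoDatabaseFactory factory) {
		// Like the blocking manager, this one refuses to start without a factory.
		return new ReactiveMongoTransactionManager(factory);
	}
}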
@@ -76,13 +76,13 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
Class<D> databaseType, ClientSessionOperator<D> databaseDecorator, Class<C> collectionType,
ClientSessionOperator<C> collectionDecorator) {

- Assert.notNull(session, "ClientSession must not be null");
+ Assert.notNull(session, "ClientSession must not be null!");
- Assert.notNull(target, "Target must not be null");
+ Assert.notNull(target, "Target must not be null!");
- Assert.notNull(sessionType, "SessionType must not be null");
+ Assert.notNull(sessionType, "SessionType must not be null!");
- Assert.notNull(databaseType, "Database type must not be null");
+ Assert.notNull(databaseType, "Database type must not be null!");
- Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null");
+ Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!");
- Assert.notNull(collectionType, "Collection type must not be null");
+ Assert.notNull(collectionType, "Collection type must not be null!");
- Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null");
+ Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!");

this.session = session;
this.target = target;

@@ -95,6 +95,10 @@ public class SessionAwareMethodInterceptor<D, C> implements MethodInterceptor {
this.sessionType = sessionType;
}

+ /*
+ * (non-Javadoc)
+ * @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation)
+ */
@Nullable
@Override
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
@@ -16,13 +16,12 @@
package org.springframework.data.mongodb;

import org.springframework.dao.UncategorizedDataAccessException;
- import org.springframework.lang.Nullable;

public class UncategorizedMongoDbException extends UncategorizedDataAccessException {

private static final long serialVersionUID = -2336595514062364929L;

- public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) {
+ public UncategorizedMongoDbException(String msg, Throwable cause) {
super(msg, cause);
}
}
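The only change above is dropping the @Nullable marker on the cause; construction stays the same. A trivial hedged sketch of how the exception is typically raised when no more specific DataAccessException applies:

import org.springframework.data.mongodb.UncategorizedMongoDbException;

class ExceptionTranslationSketch {

	public static void main(String[] args) {
		// Illustrative driver-level failure wrapped into the uncategorized exception.
		RuntimeException driverFailure = new RuntimeException("boom");
		throw new UncategorizedMongoDbException("Unexpected MongoDB driver error", driverFailure);
	}
}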
@@ -1,107 +0,0 @@
- /*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- package org.springframework.data.mongodb.aot;
-
- import java.lang.annotation.Annotation;
- import java.lang.reflect.Field;
- import java.util.ArrayList;
- import java.util.Arrays;
- import java.util.LinkedHashSet;
- import java.util.List;
- import java.util.Set;
-
- import org.springframework.aot.generate.GenerationContext;
- import org.springframework.aot.hint.MemberCategory;
- import org.springframework.aot.hint.TypeReference;
- import org.springframework.core.annotation.AnnotatedElementUtils;
- import org.springframework.core.annotation.MergedAnnotations;
- import org.springframework.data.annotation.Reference;
- import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory;
- import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor;
- import org.springframework.data.mongodb.core.mapping.DBRef;
- import org.springframework.data.mongodb.core.mapping.DocumentReference;
-
- /**
- * @author Christoph Strobl
- * @since 4.0
- */
- public class LazyLoadingProxyAotProcessor {
-
- private boolean generalLazyLoadingProxyContributed = false;
-
- public void registerLazyLoadingProxyIfNeeded(Class<?> type, GenerationContext generationContext) {
-
- Set<Field> refFields = getFieldsWithAnnotationPresent(type, Reference.class);
- if (refFields.isEmpty()) {
- return;
- }
-
- refFields.stream() //
- .filter(LazyLoadingProxyAotProcessor::isLazyLoading) //
- .forEach(field -> {
-
- if (!generalLazyLoadingProxyContributed) {
- generationContext.getRuntimeHints().proxies().registerJdkProxy(
- TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class),
- TypeReference.of(org.springframework.aop.SpringProxy.class),
- TypeReference.of(org.springframework.aop.framework.Advised.class),
- TypeReference.of(org.springframework.core.DecoratingProxy.class));
- generalLazyLoadingProxyContributed = true;
- }
-
- if (field.getType().isInterface()) {
-
- List<Class<?>> interfaces = new ArrayList<>(
- Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces()));
- interfaces.add(org.springframework.aop.SpringProxy.class);
- interfaces.add(org.springframework.aop.framework.Advised.class);
- interfaces.add(org.springframework.core.DecoratingProxy.class);
-
- generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new));
- } else {
-
- Class<?> proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(),
- LazyLoadingInterceptor::none);
-
- // see: spring-projects/spring-framework/issues/29309
- generationContext.getRuntimeHints().reflection().registerType(proxyClass,
- MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, MemberCategory.DECLARED_FIELDS);
- }
- });
- }
-
- private static boolean isLazyLoading(Field field) {
- if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) {
- return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy();
- }
- if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) {
- return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy();
- }
- return false;
- }
-
- private static Set<Field> getFieldsWithAnnotationPresent(Class<?> type, Class<? extends Annotation> annotation) {
-
- Set<Field> fields = new LinkedHashSet<>();
- for (Field field : type.getDeclaredFields()) {
- if (MergedAnnotations.from(field).get(annotation).isPresent()) {
- fields.add(field);
- }
- }
- return fields;
- }
-
- }
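The removed processor walks @DBRef/@DocumentReference fields and registers proxy hints only for the lazy ones. For reference, a field that would have triggered it looks like the following sketch (Book and Author are hypothetical entities):

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;

@Document
class Book {

	@Id String id;

	// lazy = true is what isLazyLoading(Field) above checks for; a lazy-loading proxy
	// for Author would then need AOT proxy/reflection hints in a native image.
	@DBRef(lazy = true)
	Author author;
}

@Document
class Author {

	@Id String id;
	String name;
}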
@@ -1,45 +0,0 @@
- /*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- package org.springframework.data.mongodb.aot;
-
- import java.util.function.Predicate;
-
- import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
- import org.springframework.data.util.ReactiveWrappers;
- import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary;
- import org.springframework.data.util.TypeUtils;
- import org.springframework.lang.Nullable;
- import org.springframework.util.ClassUtils;
-
- /**
- * @author Christoph Strobl
- * @since 4.0
- */
- public class MongoAotPredicates {
-
- public static final Predicate<Class<?>> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) || TypeUtils.type(type).isPartOf("org.bson");
- public static final Predicate<ReactiveLibrary> IS_REACTIVE_LIBARARY_AVAILABLE = (lib) -> ReactiveWrappers.isAvailable(lib);
- public static final Predicate<ClassLoader> IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils.isPresent("com.mongodb.client.MongoClient", classLoader);
-
- public static boolean isReactorPresent() {
- return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR);
- }
-
- public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) {
- return IS_SYNC_CLIENT_PRESENT.test(classLoader);
- }
-
- }
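The predicates in the removed class are plain java.util.function.Predicate instances, so they can be probed directly. A small sketch against the source shown above (output depends on which drivers are on the classpath):

import org.bson.types.ObjectId;
import org.springframework.data.mongodb.aot.MongoAotPredicates;

class AotPredicatesSketch {

	public static void main(String[] args) {

		// ObjectId lives in org.bson, so it counts as a simple type.
		System.out.println(MongoAotPredicates.IS_SIMPLE_TYPE.test(ObjectId.class));

		// True only when the synchronous com.mongodb.client.MongoClient is present.
		System.out.println(MongoAotPredicates.isSyncClientPresent(AotPredicatesSketch.class.getClassLoader()));

		// True when Project Reactor is on the classpath.
		System.out.println(MongoAotPredicates.isReactorPresent());
	}
}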
@@ -1,56 +0,0 @@
- /*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- package org.springframework.data.mongodb.aot;
-
- import org.springframework.aot.generate.GenerationContext;
- import org.springframework.core.ResolvableType;
- import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor;
- import org.springframework.data.mongodb.MongoManagedTypes;
- import org.springframework.lang.Nullable;
- import org.springframework.util.ClassUtils;
-
- /**
- * @author Christoph Strobl
- * @since 2022/06
- */
- class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor {
-
- private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor();
-
- public MongoManagedTypesBeanRegistrationAotProcessor() {
- setModuleIdentifier("mongo");
- }
-
- @Override
- protected boolean isMatch(@Nullable Class<?> beanType, @Nullable String beanName) {
- return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName);
- }
-
- protected boolean isMongoManagedTypes(@Nullable Class<?> beanType) {
- return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType);
- }
-
- @Override
- protected void contributeType(ResolvableType type, GenerationContext generationContext) {
-
- if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) {
- return;
- }
-
- super.contributeType(type, generationContext);
- lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext);
- }
- }
@@ -1,83 +0,0 @@
- /*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- package org.springframework.data.mongodb.aot;
-
- import static org.springframework.data.mongodb.aot.MongoAotPredicates.*;
-
- import java.util.Arrays;
-
- import org.springframework.aot.hint.MemberCategory;
- import org.springframework.aot.hint.RuntimeHints;
- import org.springframework.aot.hint.RuntimeHintsRegistrar;
- import org.springframework.aot.hint.TypeReference;
- import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
- import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
- import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
- import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
- import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback;
- import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
- import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
- import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
- import org.springframework.lang.Nullable;
- import org.springframework.util.ClassUtils;
-
- /**
- * {@link RuntimeHintsRegistrar} for repository types and entity callbacks.
- *
- * @author Christoph Strobl
- * @author Mark Paluch
- * @since 4.0
- */
- class MongoRuntimeHints implements RuntimeHintsRegistrar {
-
- @Override
- public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
-
- hints.reflection().registerTypes(
- Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class),
- TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)),
- builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
- MemberCategory.INVOKE_PUBLIC_METHODS));
-
- registerTransactionProxyHints(hints, classLoader);
-
- if (isReactorPresent()) {
-
- hints.reflection()
- .registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class),
- TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class),
- TypeReference.of(ReactiveAfterSaveCallback.class)),
- builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS,
- MemberCategory.INVOKE_PUBLIC_METHODS));
- }
-
- }
-
- private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
-
- if (MongoAotPredicates.isSyncClientPresent(classLoader)
- && ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) {
-
- hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"),
- TypeReference.of("org.springframework.aop.SpringProxy"),
- TypeReference.of("org.springframework.core.DecoratingProxy"));
- hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"),
- TypeReference.of("org.springframework.aop.SpringProxy"),
- TypeReference.of("org.springframework.core.DecoratingProxy"));
- }
- }
-
- }
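For context, MongoRuntimeHints (removed above) is a plain RuntimeHintsRegistrar; outside of the module's own registration, a registrar of the same shape could also be attached to a configuration class. A hedged sketch using Spring's @ImportRuntimeHints with a hypothetical registrar that mirrors the proxy registration shown above:

import org.springframework.aot.hint.RuntimeHints;
import org.springframework.aot.hint.RuntimeHintsRegistrar;
import org.springframework.aot.hint.TypeReference;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportRuntimeHints;
import org.springframework.lang.Nullable;

// Hypothetical registrar, not part of the module shown above.
class MyMongoProxyHints implements RuntimeHintsRegistrar {

	@Override
	public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
		// Same JDK proxy shape as registerTransactionProxyHints(..) above.
		hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"),
				TypeReference.of("org.springframework.aop.SpringProxy"),
				TypeReference.of("org.springframework.core.DecoratingProxy"));
	}
}

@Configuration
@ImportRuntimeHints(MyMongoProxyHints.class)
class NativeImageConfig {
}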
@@ -25,7 +25,9 @@ import org.springframework.data.mongodb.core.convert.DbRefResolver;
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
+ import org.springframework.data.mongodb.core.mapping.Document;
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
+ import org.springframework.lang.Nullable;

import com.mongodb.MongoClientSettings;
import com.mongodb.MongoClientSettings.Builder;

@@ -78,12 +80,30 @@ public abstract class AbstractMongoClientConfiguration extends MongoConfiguratio
return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName());
}

+ /**
+ * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration
+ * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending
+ * {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is
+ * overridden to implement alternate behavior.
+ *
+ * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for
+ * entities.
+ * @deprecated use {@link #getMappingBasePackages()} instead.
+ */
+ @Deprecated
+ @Nullable
+ protected String getMappingBasePackage() {
+
+ Package mappingBasePackage = getClass().getPackage();
+ return mappingBasePackage == null ? null : mappingBasePackage.getName();
+ }
+
/**
* Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and
- * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
+ * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
*
* @see #customConversions()
- * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
+ * @see #mongoMappingContext(MongoCustomConversions)
* @see #mongoDbFactory()
*/
@Bean
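The restored getMappingBasePackage() simply falls back to the configuration class' package. A minimal concrete configuration sketch, assuming entities live in the same package as the config class and a database called "test" (both assumptions):

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

@Configuration
class AppMongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "test"; // assumed database name
	}

	@Override
	public MongoClient mongoClient() {
		return MongoClients.create("mongodb://localhost:27017"); // assumed connection string
	}

	// Entity scanning defaults to this class' package via getMappingBasePackage()/getMappingBasePackages().
}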
@@ -84,10 +84,10 @@ public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurat

/**
* Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and
- * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied.
+ * {@link #mongoMappingContext(MongoCustomConversions)}. Will get {@link #customConversions()} applied.
*
* @see #customConversions()
- * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)
+ * @see #mongoMappingContext(MongoCustomConversions)
* @see #reactiveMongoDbFactory()
* @return never {@literal null}.
*/
@@ -30,6 +30,10 @@ import com.mongodb.ConnectionString;
*/
public class ConnectionStringPropertyEditor extends PropertyEditorSupport {

+ /*
+ * (non-Javadoc)
+ * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
+ */
@Override
public void setAsText(@Nullable String connectionString) {
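ConnectionStringPropertyEditor converts text into a ConnectionString; the added comment block does not change behavior. A tiny usage sketch, assuming the editor remains public in org.springframework.data.mongodb.config:

import com.mongodb.ConnectionString;

import org.springframework.data.mongodb.config.ConnectionStringPropertyEditor;

class ConnectionStringEditorSketch {

	public static void main(String[] args) {

		ConnectionStringPropertyEditor editor = new ConnectionStringPropertyEditor();
		editor.setAsText("mongodb://localhost:27017/test");

		// getValue() holds the parsed ConnectionString set by setAsText(..).
		ConnectionString connectionString = (ConnectionString) editor.getValue();
		System.out.println(connectionString.getDatabase());
	}
}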
@@ -34,6 +34,10 @@ import org.w3c.dom.Element;
*/
class GridFsTemplateParser extends AbstractBeanDefinitionParser {

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
+ */
@Override
protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
throws BeanDefinitionStoreException {

@@ -42,6 +46,10 @@ class GridFsTemplateParser extends AbstractBeanDefinitionParser {
return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
+ */
@Override
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
@@ -80,7 +80,7 @@ import org.w3c.dom.Element;
public class MappingMongoConverterParser implements BeanDefinitionParser {

private static final String BASE_PACKAGE = "base-package";
- private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator",
+ private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator",
MappingMongoConverterParser.class.getClassLoader());

/* (non-Javadoc)

@@ -253,7 +253,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
&& Boolean.parseBoolean(abbreviateFieldNames);

if (fieldNamingStrategyReferenced && abbreviationActivated) {
- context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured",
+ context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!",
element);
return;
}

@@ -374,6 +374,10 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
this.delegates = new HashSet<>(Arrays.asList(filters));
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory)
+ */
public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory)
throws IOException {
@@ -47,16 +47,28 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
MongoAuditingRegistrar.class.getClassLoader());

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
+ */
@Override
protected Class<?> getBeanClass(Element element) {
return AuditingEntityCallback.class;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId()
+ */
@Override
protected boolean shouldGenerateId() {
return true;
}

+ /*
+ * (non-Javadoc)
+ * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder)
+ */
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
@@ -18,10 +18,11 @@ package org.springframework.data.mongodb.config;
 import java.lang.annotation.Annotation;
 
 import org.springframework.beans.factory.config.BeanDefinition;
+import org.springframework.beans.factory.support.AbstractBeanDefinition;
 import org.springframework.beans.factory.support.BeanDefinitionBuilder;
 import org.springframework.beans.factory.support.BeanDefinitionRegistry;
 import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
-import org.springframework.core.Ordered;
+import org.springframework.core.type.AnnotationMetadata;
 import org.springframework.data.auditing.IsNewAwareAuditingHandler;
 import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
 import org.springframework.data.auditing.config.AuditingConfiguration;
@@ -35,42 +36,68 @@ import org.springframework.util.Assert;
  * @author Thomas Darimont
  * @author Oliver Gierke
  * @author Mark Paluch
- * @author Christoph Strobl
  */
-class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered {
+class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
+	 */
 	@Override
 	protected Class<? extends Annotation> getAnnotation() {
 		return EnableMongoAuditing.class;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
+	 */
 	@Override
 	protected String getAuditingHandlerBeanName() {
 		return "mongoAuditingHandler";
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry)
+	 */
 	@Override
-	protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
-			BeanDefinitionRegistry registry) {
+	public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) {
 
-		builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
+		Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!");
+		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
+
+		super.registerBeanDefinitions(annotationMetadata, registry);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
+	 */
 	@Override
 	protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
 
-		Assert.notNull(configuration, "AuditingConfiguration must not be null");
+		Assert.notNull(configuration, "AuditingConfiguration must not be null!");
 
-		return configureDefaultAuditHandlerAttributes(configuration,
-				BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class));
+		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
+
+		BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
+		definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
+
+		builder.addConstructorArgValue(definition.getBeanDefinition());
+		return configureDefaultAuditHandlerAttributes(configuration, builder);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
+	 */
 	@Override
 	protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
 			BeanDefinitionRegistry registry) {
 
-		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
-		Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
+		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
+		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
 
 		BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
 				.rootBeanDefinition(AuditingEntityCallback.class);
@@ -81,8 +108,4 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport impl
 				AuditingEntityCallback.class.getName(), registry);
 	}
 
-	@Override
-	public int getOrder() {
-		return Ordered.LOWEST_PRECEDENCE;
-	}
 }
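The registrar changes above only affect how the auditing handler bean is wired internally; the annotation-driven entry point stays the same on both sides of the diff. As a minimal sketch (the `Order` type, field names, and collection name are invented for illustration), enabling auditing and letting the registered `AuditingEntityCallback` populate the audit fields could look like this:

```java
import java.time.Instant;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.LastModifiedDate;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
import org.springframework.data.mongodb.core.mapping.Document;

@Configuration
@EnableMongoAuditing // imports MongoAuditingRegistrar, which registers the handler and callback shown above
class AuditingConfig {}

@Document("orders") // hypothetical collection, used only for this sketch
class Order {

	@Id String id;
	@CreatedDate Instant createdAt;       // set on first save by the auditing callback
	@LastModifiedDate Instant modifiedAt; // refreshed on every save
}
```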
@@ -35,6 +35,10 @@ import org.w3c.dom.Element;
  */
 public class MongoClientParser implements BeanDefinitionParser {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	public BeanDefinition parse(Element element, ParserContext parserContext) {
 
 		Object source = parserContext.extractSource(element);
@@ -30,7 +30,6 @@ import org.springframework.data.convert.CustomConversions;
 import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
 import org.springframework.data.mapping.model.FieldNamingStrategy;
 import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
-import org.springframework.data.mongodb.MongoManagedTypes;
 import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
 import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter;
 import org.springframework.data.mongodb.core.mapping.Document;
@@ -77,13 +76,14 @@ public abstract class MongoConfigurationSupport {
 	 *
 	 * @see #getMappingBasePackages()
	 * @return
+	 * @throws ClassNotFoundException
 	 */
 	@Bean
-	public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions,
-			MongoManagedTypes mongoManagedTypes) {
+	public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions)
+			throws ClassNotFoundException {
 
 		MongoMappingContext mappingContext = new MongoMappingContext();
-		mappingContext.setManagedTypes(mongoManagedTypes);
+		mappingContext.setInitialEntitySet(getInitialEntitySet());
 		mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder());
 		mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
 		mappingContext.setAutoIndexCreation(autoIndexCreation());
@@ -91,16 +91,6 @@ public abstract class MongoConfigurationSupport {
 		return mappingContext;
 	}
 
-	/**
-	 * @return new instance of {@link MongoManagedTypes}.
-	 * @throws ClassNotFoundException
-	 * @since 4.0
-	 */
-	@Bean
-	public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException {
-		return MongoManagedTypes.fromIterable(getInitialEntitySet());
-	}
-
 	/**
 	 * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
 	 * {@link CustomConversions} will be registered with the
@@ -206,7 +196,7 @@ public abstract class MongoConfigurationSupport {
 	 * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
 	 *
 	 * @return {@literal false} by default. <br />
-	 *         <strong>INFO:</strong> As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
+	 *         <strong>INFO</strong>: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}.
 	 * @since 2.2
 	 */
 	protected boolean autoIndexCreation() {
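The `mongoMappingContext` signature differs between the two sides, but the configuration callbacks it relies on (`getMappingBasePackages()`, `autoIndexCreation()`) are identical. A hedged sketch of a user-facing configuration class built on this support type, with a placeholder database name and base package, might look like this:

```java
import java.util.Collection;
import java.util.List;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

@Configuration
class MongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "sample-db"; // placeholder database name
	}

	@Override
	protected Collection<String> getMappingBasePackages() {
		return List.of("com.example.domain"); // placeholder package scanned for @Document types
	}

	@Override
	protected boolean autoIndexCreation() {
		return false; // mirrors the 3.x default documented in the Javadoc above
	}
}
```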
@@ -51,6 +51,10 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
 	private static final String OPTIONS_DELIMITER = "?";
 	private static final String OPTION_VALUE_DELIMITER = "&";
 
+	/*
+	 * (non-Javadoc)
+	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
+	 */
 	@Override
 	public void setAsText(@Nullable String text) throws IllegalArgumentException {
 
@@ -117,7 +121,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
 							userNameAndPassword[1].toCharArray()));
 				} else {
 					throw new IllegalArgumentException(
-							String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism));
+							String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism));
 				}
 			}
 		} else {
@@ -194,7 +198,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
 			String[] optionArgs = option.split("=");
 
 			if (optionArgs.length == 1) {
-				throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0]));
+				throw new IllegalArgumentException(String.format("Query parameter '%s' has no value!", optionArgs[0]));
 			}
 
 			properties.put(optionArgs[0], optionArgs[1]);
@@ -209,21 +213,21 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
 
 		if (source.length != 2) {
 			throw new IllegalArgumentException(
-					"Credentials need to specify username and password like in 'username:password@database'");
+					"Credentials need to specify username and password like in 'username:password@database'!");
 		}
 	}
 
 	private static void verifyDatabasePresent(String source) {
 
 		if (!StringUtils.hasText(source)) {
-			throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'");
+			throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!");
 		}
 	}
 
 	private static void verifyUserNamePresent(String[] source) {
 
 		if (source.length == 0 || !StringUtils.hasText(source[0])) {
-			throw new IllegalArgumentException("Credentials need to specify username");
+			throw new IllegalArgumentException("Credentials need to specify username!");
 		}
 	}
 
@@ -231,7 +235,7 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
 		try {
 			return URLDecoder.decode(it, "UTF-8");
 		} catch (UnsupportedEncodingException e) {
-			throw new IllegalArgumentException("o_O UTF-8 not supported", e);
+			throw new IllegalArgumentException("o_O UTF-8 not supported!", e);
 		}
 	}
 }
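Only the exception messages change here; the accepted credential format stays `username:password@database`, as the messages themselves state. A hedged usage sketch (the username, password, and database values are invented, and the exact runtime type behind `getValue()` is not asserted):

```java
import org.springframework.data.mongodb.config.MongoCredentialPropertyEditor;

class CredentialParsingSketch {

	public static void main(String[] args) {

		MongoCredentialPropertyEditor editor = new MongoCredentialPropertyEditor();

		// username:password@database; a missing username, password, or database
		// triggers the IllegalArgumentExceptions shown in the diff above
		editor.setAsText("jon:warg@snow");

		Object credentials = editor.getValue(); // the parsed com.mongodb.MongoCredential entries
		System.out.println(credentials);
	}
}
```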
@@ -62,6 +62,10 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
 		MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	@Override
 	protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
 			throws BeanDefinitionStoreException {
@@ -70,6 +74,10 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
 		return StringUtils.hasText(id) ? id : BeanNames.DB_FACTORY_BEAN_NAME;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	@Override
 	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
 
@@ -163,7 +171,7 @@ public class MongoDbFactoryParser extends AbstractBeanDefinitionParser {
 
 		if (element.getAttributes().getLength() > allowedAttributesCount) {
 
-			parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually",
+			parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually!",
 					parserContext.extractSource(element));
 		}
 
@@ -26,6 +26,10 @@ import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
  */
 public class MongoNamespaceHandler extends NamespaceHandlerSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.NamespaceHandler#init()
+	 */
 	public void init() {
 
 		registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser());
@@ -40,6 +40,7 @@ import org.w3c.dom.Element;
  * @author Christoph Strobl
  * @author Mark Paluch
  */
+@SuppressWarnings("deprecation")
 abstract class MongoParsingUtils {
 
 	private MongoParsingUtils() {}
@@ -39,6 +39,10 @@ import org.w3c.dom.Element;
  */
 class MongoTemplateParser extends AbstractBeanDefinitionParser {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	@Override
 	protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext)
 			throws BeanDefinitionStoreException {
@@ -47,6 +51,10 @@ class MongoTemplateParser extends AbstractBeanDefinitionParser {
 		return StringUtils.hasText(id) ? id : BeanNames.MONGO_TEMPLATE_BEAN_NAME;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	@Override
 	protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
 
@@ -41,11 +41,19 @@ public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEnti
 		this.converter = converter;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.FactoryBean#getObject()
+	 */
 	@Override
 	public PersistentEntities getObject() {
 		return PersistentEntities.of(converter.getMappingContext());
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.FactoryBean#getObjectType()
+	 */
 	@Override
 	public Class<?> getObjectType() {
 		return PersistentEntities.class;
@@ -18,9 +18,11 @@ package org.springframework.data.mongodb.config;
 import java.lang.annotation.Annotation;
 
 import org.springframework.beans.factory.config.BeanDefinition;
+import org.springframework.beans.factory.support.AbstractBeanDefinition;
 import org.springframework.beans.factory.support.BeanDefinitionBuilder;
 import org.springframework.beans.factory.support.BeanDefinitionRegistry;
 import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
+import org.springframework.core.type.AnnotationMetadata;
 import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
 import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
 import org.springframework.data.auditing.config.AuditingConfiguration;
@@ -32,42 +34,56 @@ import org.springframework.util.Assert;
  * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation.
  *
  * @author Mark Paluch
- * @author Christoph Strobl
 * @since 3.1
 */
 class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
+	 */
 	@Override
 	protected Class<? extends Annotation> getAnnotation() {
 		return EnableReactiveMongoAuditing.class;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
+	 */
 	@Override
 	protected String getAuditingHandlerBeanName() {
 		return "reactiveMongoAuditingHandler";
 	}
 
-	@Override
-	protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration,
-			BeanDefinitionRegistry registry) {
-		builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext");
-	}
-
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
+	 */
 	@Override
 	protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
 
-		Assert.notNull(configuration, "AuditingConfiguration must not be null");
+		Assert.notNull(configuration, "AuditingConfiguration must not be null!");
 
-		return configureDefaultAuditHandlerAttributes(configuration,
-				BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class));
+		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class);
+
+		BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
+		definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
+
+		builder.addConstructorArgValue(definition.getBeanDefinition());
+		return configureDefaultAuditHandlerAttributes(configuration, builder);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
+	 */
 	@Override
 	protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
 			BeanDefinitionRegistry registry) {
 
-		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null");
-		Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
+		Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
+		Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
 
 		BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
 
@@ -32,6 +32,10 @@ import com.mongodb.ReadConcernLevel;
  */
 public class ReadConcernPropertyEditor extends PropertyEditorSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)
+	 */
 	@Override
 	public void setAsText(@Nullable String readConcernString) {
 
@@ -29,6 +29,10 @@ import com.mongodb.ReadPreference;
  */
 public class ReadPreferencePropertyEditor extends PropertyEditorSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
+	 */
 	@Override
 	public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException {
 
@@ -43,9 +43,13 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
 	 * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
 	 */
 	private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
-	private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'; Check your replica set configuration";
+	private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'. Check your replica set configuration!";
 	private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);
 
+	/*
+	 * (non-Javadoc)
+	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
+	 */
 	@Override
 	public void setAsText(@Nullable String replicaSetString) {
 
@@ -68,7 +72,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
 
 		if (serverAddresses.isEmpty()) {
 			throw new IllegalArgumentException(
-					"Could not resolve at least one server of the replica set configuration; Validate your config");
+					"Could not resolve at least one server of the replica set configuration! Validate your config!");
 		}
 
 		setValue(serverAddresses.toArray(new ServerAddress[serverAddresses.size()]));
@@ -125,7 +129,7 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
 	 */
 	private String[] extractHostAddressAndPort(String addressAndPortSource) {
 
-		Assert.notNull(addressAndPortSource, "Address and port source must not be null");
+		Assert.notNull(addressAndPortSource, "Address and port source must not be null!");
 
 		String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN);
 		String hostAddress = hostAndPort[0];
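Since the editor's `setValue` call in the diff stores a `ServerAddress[]`, a hedged usage sketch (host and port values are placeholders) of how the comma-separated replica set string is parsed could look like this:

```java
import com.mongodb.ServerAddress;
import org.springframework.data.mongodb.config.ServerAddressPropertyEditor;

class ReplicaSetParsingSketch {

	public static void main(String[] args) {

		ServerAddressPropertyEditor editor = new ServerAddressPropertyEditor();

		// comma-separated host[:port] entries; unresolvable hosts are skipped with a warning,
		// and an empty result triggers the IllegalArgumentException shown above
		editor.setAsText("127.0.0.1:27017,127.0.0.1:27018");

		ServerAddress[] addresses = (ServerAddress[]) editor.getValue();
		System.out.println(addresses.length); // 2
	}
}
```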
@@ -26,6 +26,10 @@ import com.mongodb.WriteConcern;
  */
 public class StringToWriteConcernConverter implements Converter<String, WriteConcern> {
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
+	 */
 	public WriteConcern convert(String source) {
 
 		WriteConcern writeConcern = WriteConcern.valueOf(source);
@@ -29,6 +29,10 @@ import org.springframework.util.StringUtils;
  */
 public class UUidRepresentationPropertyEditor extends PropertyEditorSupport {
 
+	/*
+	 * (non-Javadoc)
+	 * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String)
+	 */
 	@Override
 	public void setAsText(@Nullable String value) {
 
@@ -66,12 +66,11 @@ class AggregationUtil {
 
 		if (!(aggregation instanceof TypedAggregation)) {
 
-			if(inputType == null) {
+			if (inputType == null) {
 				return untypedMappingContext.get();
 			}
 
-			if (domainTypeMapping == DomainTypeMapping.STRICT
-					&& !aggregation.getPipeline().containsUnionWith()) {
+			if (domainTypeMapping == DomainTypeMapping.STRICT && !aggregation.getPipeline().containsUnionWith()) {
 				return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
 			}
 
@@ -79,8 +78,7 @@ class AggregationUtil {
 		}
 
 		inputType = ((TypedAggregation<?>) aggregation).getInputType();
-		if (domainTypeMapping == DomainTypeMapping.STRICT
-				&& !aggregation.getPipeline().containsUnionWith()) {
+		if (domainTypeMapping == DomainTypeMapping.STRICT && !aggregation.getPipeline().containsUnionWith()) {
 			return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
 		}
 
@@ -19,7 +19,6 @@ import java.util.List;
 
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.query.Update;
-import org.springframework.data.mongodb.core.query.UpdateDefinition;
 import org.springframework.data.util.Pair;
 
 import com.mongodb.bulk.BulkWriteResult;
@@ -29,15 +28,6 @@ import com.mongodb.bulk.BulkWriteResult;
 * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
 * operations or list of similar operations in sequence which can then eventually be executed by calling
 * {@link #execute()}.
- *
- * <pre class="code">
- * MongoOperations ops = …;
- *
- * ops.bulkOps(BulkMode.UNORDERED, Person.class)
- * 				.insert(newPerson)
- * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
- * 				.execute();
- * </pre>
 * <p>
 * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
 * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
@@ -85,19 +75,7 @@ public interface BulkOperations {
 	 * @param update {@link Update} operation to perform, must not be {@literal null}.
 	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
 	 */
-	default BulkOperations updateOne(Query query, Update update) {
-		return updateOne(query, (UpdateDefinition) update);
-	}
-
-	/**
-	 * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
-	 *
-	 * @param query update criteria, must not be {@literal null}.
-	 * @param update {@link Update} operation to perform, must not be {@literal null}.
-	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
-	 * @since 4.1
-	 */
-	BulkOperations updateOne(Query query, UpdateDefinition update);
+	BulkOperations updateOne(Query query, Update update);
 
 	/**
 	 * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated.
@@ -105,7 +83,7 @@ public interface BulkOperations {
 	 * @param updates Update operations to perform.
 	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
 	 */
-	BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates);
+	BulkOperations updateOne(List<Pair<Query, Update>> updates);
 
 	/**
 	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
@@ -114,19 +92,7 @@ public interface BulkOperations {
 	 * @param update Update operation to perform.
 	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
 	 */
-	default BulkOperations updateMulti(Query query, Update update) {
-		return updateMulti(query, (UpdateDefinition) update);
-	}
-
-	/**
-	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
-	 *
-	 * @param query Update criteria.
-	 * @param update Update operation to perform.
-	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
-	 * @since 4.1
-	 */
-	BulkOperations updateMulti(Query query, UpdateDefinition update);
+	BulkOperations updateMulti(Query query, Update update);
 
 	/**
 	 * Add a list of updates to the bulk operation. For each update request, all matching documents are updated.
@@ -134,7 +100,7 @@ public interface BulkOperations {
 	 * @param updates Update operations to perform.
 	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
 	 */
-	BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates);
+	BulkOperations updateMulti(List<Pair<Query, Update>> updates);
 
 	/**
 	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -144,20 +110,7 @@ public interface BulkOperations {
 	 * @param update Update operation to perform.
 	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
 	 */
-	default BulkOperations upsert(Query query, Update update) {
-		return upsert(query, (UpdateDefinition) update);
-	}
-
-	/**
-	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
-	 * else an insert.
-	 *
-	 * @param query Update criteria.
-	 * @param update Update operation to perform.
-	 * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}.
-	 * @since 4.1
-	 */
-	BulkOperations upsert(Query query, UpdateDefinition update);
+	BulkOperations upsert(Query query, Update update);
 
 	/**
 	 * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty,
@@ -189,7 +142,7 @@ public interface BulkOperations {
 	 *
 	 * @param query Update criteria.
 	 * @param replacement the replacement document. Must not be {@literal null}.
-	 * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
+	 * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
 	 * @since 2.2
 	 */
 	default BulkOperations replaceOne(Query query, Object replacement) {
@@ -202,7 +155,7 @@ public interface BulkOperations {
 	 * @param query Update criteria.
 	 * @param replacement the replacement document. Must not be {@literal null}.
 	 * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
-	 * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}.
+	 * @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
 	 * @since 2.2
 	 */
 	BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
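The fluent flow described by the interface Javadoc (and by the code sample removed from it above) is the same on both sides, only the update parameter types differ. A hedged sketch of the usage, with `Person` as an invented stand-in domain type:

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOperationsSketch {

	// Person is a placeholder document type used only for this sketch
	record Person(String firstname, String lastname) {}

	BulkWriteResult insertAndUpdate(MongoOperations ops, Person newPerson) {

		// mirrors the Javadoc example: queue several single operations, then execute them as one batch
		return ops.bulkOps(BulkMode.UNORDERED, Person.class)
				.insert(newPerson)
				.updateOne(new Query(where("firstname").is("Joe")), Update.update("lastname", "Doe"))
				.upsert(new Query(where("firstname").is("Jane")), Update.update("lastname", "Doe"))
				.execute();
	}
}
```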
@@ -1,221 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-
-import org.bson.Document;
-import org.bson.conversions.Bson;
-import org.springframework.context.ApplicationEvent;
-import org.springframework.data.mapping.PersistentEntity;
-import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
-import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
-import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
-import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
-import org.springframework.data.mongodb.core.convert.QueryMapper;
-import org.springframework.data.mongodb.core.convert.UpdateMapper;
-import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
-import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
-import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
-import org.springframework.data.mongodb.core.query.Collation;
-import org.springframework.data.mongodb.core.query.Query;
-import org.springframework.data.mongodb.core.query.Update;
-import org.springframework.data.mongodb.core.query.UpdateDefinition;
-import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
-import org.springframework.util.Assert;
-
-import com.mongodb.client.model.BulkWriteOptions;
-import com.mongodb.client.model.DeleteManyModel;
-import com.mongodb.client.model.DeleteOneModel;
-import com.mongodb.client.model.InsertOneModel;
-import com.mongodb.client.model.ReplaceOneModel;
-import com.mongodb.client.model.UpdateManyModel;
-import com.mongodb.client.model.UpdateOneModel;
-import com.mongodb.client.model.UpdateOptions;
-import com.mongodb.client.model.WriteModel;
-
-/**
- * Support class for bulk operations.
- *
- * @author Mark Paluch
- * @since 4.1
- */
-abstract class BulkOperationsSupport {
-
-	private final String collectionName;
-
-	BulkOperationsSupport(String collectionName) {
-
-		Assert.hasText(collectionName, "CollectionName must not be null nor empty");
-
-		this.collectionName = collectionName;
-	}
-
-	/**
-	 * Emit a {@link BeforeSaveEvent}.
-	 *
-	 * @param holder
-	 */
-	void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
-
-		if (holder.model() instanceof InsertOneModel) {
-
-			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
-			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
-		} else if (holder.model() instanceof ReplaceOneModel) {
-
-			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
-			maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName));
-		}
-	}
-
-	/**
-	 * Emit a {@link AfterSaveEvent}.
-	 *
-	 * @param holder
-	 */
-	void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
-
-		if (holder.model() instanceof InsertOneModel) {
-
-			Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
-			maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
-		} else if (holder.model() instanceof ReplaceOneModel) {
-
-			Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
-			maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName));
-		}
-	}
-
-	WriteModel<Document> mapWriteModel(Object source, WriteModel<Document> writeModel) {
-
-		if (writeModel instanceof UpdateOneModel<Document> model) {
-
-			if (source instanceof AggregationUpdate aggregationUpdate) {
-
-				List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
-				return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
-			}
-
-			return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
-					model.getOptions());
-		}
-
-		if (writeModel instanceof UpdateManyModel<Document> model) {
-
-			if (source instanceof AggregationUpdate aggregationUpdate) {
-
-				List<Document> pipeline = mapUpdatePipeline(aggregationUpdate);
-				return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions());
-			}
-
-			return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
-					model.getOptions());
-		}
-
-		if (writeModel instanceof DeleteOneModel<Document> model) {
-			return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
-		}
-
-		if (writeModel instanceof DeleteManyModel<Document> model) {
-			return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
-		}
-
-		return writeModel;
-	}
-
-	private List<Document> mapUpdatePipeline(AggregationUpdate source) {
-
-		Class<?> type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class;
-		AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type,
-				updateMapper().getMappingContext(), queryMapper());
-
-		return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context);
-	}
-
-	/**
-	 * Emit a {@link ApplicationEvent} if event multicasting is enabled.
-	 *
-	 * @param event
-	 */
-	protected abstract void maybeEmitEvent(ApplicationEvent event);
-
-	/**
-	 * @return the {@link UpdateMapper} to use.
-	 */
-	protected abstract UpdateMapper updateMapper();
-
-	/**
-	 * @return the {@link QueryMapper} to use.
-	 */
-	protected abstract QueryMapper queryMapper();
-
-	/**
-	 * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}.
-	 */
-	protected abstract Optional<? extends MongoPersistentEntity<?>> entity();
-
-	protected Bson getMappedUpdate(Bson update) {
-		return updateMapper().getMappedObject(update, entity());
-	}
-
-	protected Bson getMappedQuery(Bson query) {
-		return queryMapper().getMappedObject(query, entity());
-	}
-
-	protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
-
-		BulkWriteOptions options = new BulkWriteOptions();
-
-		return switch (bulkMode) {
-			case ORDERED -> options.ordered(true);
-			case UNORDERED -> options.ordered(false);
-		};
-	}
-
-	/**
-	 * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
-	 * @param update The {@link Update} to apply
-	 * @param upsert flag to indicate if document should be upserted.
-	 * @return new instance of {@link UpdateOptions}.
-	 */
-	protected static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
-
-		UpdateOptions options = new UpdateOptions();
-		options.upsert(upsert);
-
-		if (update.hasArrayFilters()) {
-			List<Document> list = new ArrayList<>(update.getArrayFilters().size());
-			for (ArrayFilter arrayFilter : update.getArrayFilters()) {
-				list.add(arrayFilter.asDocument());
-			}
-			options.arrayFilters(list);
-		}
-
-		filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
-		return options;
-	}
-
-	/**
-	 * Value object chaining together an actual source with its {@link WriteModel} representation.
-	 *
-	 * @author Christoph Strobl
-	 */
-	record SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
-	}
-}
@@ -36,29 +36,21 @@ import com.mongodb.client.model.changestream.OperationType;
  *
  * @author Christoph Strobl
  * @author Mark Paluch
- * @author Myroslav Kosinskyi
  * @since 2.1
  */
 public class ChangeStreamEvent<T> {
 
   @SuppressWarnings("rawtypes") //
-  private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater
-      .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument");
-
-  @SuppressWarnings("rawtypes") //
-  private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater
-      .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange");
+  private static final AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> CONVERTED_UPDATER = AtomicReferenceFieldUpdater
+      .newUpdater(ChangeStreamEvent.class, Object.class, "converted");
 
   private final @Nullable ChangeStreamDocument<Document> raw;
 
   private final Class<T> targetType;
   private final MongoConverter converter;
 
-  // accessed through CONVERTED_FULL_DOCUMENT_UPDATER.
-  private volatile @Nullable T convertedFullDocument;
-
-  // accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER.
-  private volatile @Nullable T convertedFullDocumentBeforeChange;
+  // accessed through CONVERTED_UPDATER.
+  private volatile @Nullable T converted;
 
   /**
    * @param raw can be {@literal null}.
@@ -155,43 +147,27 @@ public class ChangeStreamEvent<T> {
   @Nullable
   public T getBody() {
 
-    if (raw == null || raw.getFullDocument() == null) {
+    if (raw == null) {
       return null;
     }
 
-    return getConvertedFullDocument(raw.getFullDocument());
-  }
-
-  /**
-   * Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being changed.
-   *
-   * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is
-   *         {@literal null}.
-   * @since 4.0
-   */
-  @Nullable
-  public T getBodyBeforeChange() {
-
-    if (raw == null || raw.getFullDocumentBeforeChange() == null) {
-      return null;
+    Document fullDocument = raw.getFullDocument();
+
+    if (fullDocument == null) {
+      return targetType.cast(fullDocument);
     }
 
-    return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange());
+    return getConverted(fullDocument);
   }
 
   @SuppressWarnings("unchecked")
-  private T getConvertedFullDocumentBeforeChange(Document fullDocument) {
-    return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER);
-  }
-
-  @SuppressWarnings("unchecked")
-  private T getConvertedFullDocument(Document fullDocument) {
-    return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER);
-  }
-
-  private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater<ChangeStreamEvent, Object> updater) {
-
-    Object result = updater.get(this);
+  private T getConverted(Document fullDocument) {
+    return (T) doGetConverted(fullDocument);
+  }
+
+  private Object doGetConverted(Document fullDocument) {
+
+    Object result = CONVERTED_UPDATER.get(this);
 
     if (result != null) {
       return result;
@@ -200,19 +176,23 @@ public class ChangeStreamEvent<T> {
     if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) {
 
       result = converter.read(targetType, fullDocument);
-      return updater.compareAndSet(this, null, result) ? result : updater.get(this);
+      return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
     }
 
     if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) {
 
       result = converter.getConversionService().convert(fullDocument, targetType);
-      return updater.compareAndSet(this, null, result) ? result : updater.get(this);
+      return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
     }
 
     throw new IllegalArgumentException(
        String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType));
   }
 
+  /*
+   * (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
   @Override
   public String toString() {
     return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
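
To illustrate what the converter slots above are used for, here is a minimal, hypothetical consumer of such events; "Person" is a placeholder domain type, and getBodyBeforeChange() exists only on the side of the diff that still carries the 4.0 API:

    import org.springframework.data.mongodb.core.ChangeStreamEvent;

    class ChangeStreamEventHandler {

        record Person(String id, String lastname) {} // placeholder domain type

        void handle(ChangeStreamEvent<Person> event) {
            Person current = event.getBody();              // converted fullDocument, may be null
            Person previous = event.getBodyBeforeChange(); // converted fullDocumentBeforeChange, may be null
            // react to the change here
        }
    }
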
@@ -32,7 +32,6 @@ import org.springframework.util.ObjectUtils;
 
 import com.mongodb.client.model.changestream.ChangeStreamDocument;
 import com.mongodb.client.model.changestream.FullDocument;
-import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
 
 /**
  * Options applicable to MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Streams</a>. Intended
@@ -41,7 +40,6 @@ import com.mongodb.client.model.changestream.FullDocumentBeforeChange;
  *
  * @author Christoph Strobl
  * @author Mark Paluch
- * @author Myroslav Kosinskyi
  * @since 2.1
  */
 public class ChangeStreamOptions {
@@ -49,7 +47,6 @@ public class ChangeStreamOptions {
   private @Nullable Object filter;
   private @Nullable BsonValue resumeToken;
   private @Nullable FullDocument fullDocumentLookup;
-  private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
   private @Nullable Collation collation;
   private @Nullable Object resumeTimestamp;
   private Resume resume = Resume.UNDEFINED;
@@ -77,14 +74,6 @@ public class ChangeStreamOptions {
     return Optional.ofNullable(fullDocumentLookup);
   }
 
-  /**
-   * @return {@link Optional#empty()} if not set.
-   * @since 4.0
-   */
-  public Optional<FullDocumentBeforeChange> getFullDocumentBeforeChangeLookup() {
-    return Optional.ofNullable(fullDocumentBeforeChangeLookup);
-  }
-
   /**
    * @return {@link Optional#empty()} if not set.
    */
@@ -150,16 +139,16 @@ public class ChangeStreamOptions {
       return timestamp;
     }
 
-    if (timestamp instanceof Instant instant) {
-      return new BsonTimestamp((int) instant.getEpochSecond(), 0);
+    if (timestamp instanceof Instant) {
+      return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
     }
 
-    if (timestamp instanceof BsonTimestamp bsonTimestamp) {
-      return Instant.ofEpochSecond(bsonTimestamp.getTime());
+    if (timestamp instanceof BsonTimestamp) {
+      return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
     }
 
     throw new IllegalArgumentException(
-        "o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was "
+        "o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
            + ObjectUtils.nullSafeClassName(timestamp));
   }
 
@@ -181,9 +170,6 @@ public class ChangeStreamOptions {
     if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) {
       return false;
     }
-    if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) {
-      return false;
-    }
     if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) {
       return false;
     }
@@ -198,7 +184,6 @@ public class ChangeStreamOptions {
     int result = ObjectUtils.nullSafeHashCode(filter);
     result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken);
     result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup);
-    result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup);
     result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
     result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp);
     result = 31 * result + ObjectUtils.nullSafeHashCode(resume);
@@ -235,7 +220,6 @@ public class ChangeStreamOptions {
     private @Nullable Object filter;
     private @Nullable BsonValue resumeToken;
     private @Nullable FullDocument fullDocumentLookup;
-    private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup;
     private @Nullable Collation collation;
     private @Nullable Object resumeTimestamp;
     private Resume resume = Resume.UNDEFINED;
@@ -250,7 +234,7 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder collation(Collation collation) {
 
-      Assert.notNull(collation, "Collation must not be null nor empty");
+      Assert.notNull(collation, "Collation must not be null nor empty!");
 
       this.collation = collation;
       return this;
@@ -274,7 +258,7 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder filter(Aggregation filter) {
 
-      Assert.notNull(filter, "Filter must not be null");
+      Assert.notNull(filter, "Filter must not be null!");
 
       this.filter = filter;
       return this;
@@ -303,7 +287,7 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) {
 
-      Assert.notNull(resumeToken, "ResumeToken must not be null");
+      Assert.notNull(resumeToken, "ResumeToken must not be null!");
 
       this.resumeToken = resumeToken;
 
@@ -332,38 +316,12 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) {
 
-      Assert.notNull(lookup, "Lookup must not be null");
+      Assert.notNull(lookup, "Lookup must not be null!");
 
       this.fullDocumentLookup = lookup;
       return this;
     }
 
-    /**
-     * Set the {@link FullDocumentBeforeChange} lookup to use.
-     *
-     * @param lookup must not be {@literal null}.
-     * @return this.
-     * @since 4.0
-     */
-    public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) {
-
-      Assert.notNull(lookup, "Lookup must not be null");
-
-      this.fullDocumentBeforeChangeLookup = lookup;
-      return this;
-    }
-
-    /**
-     * Return the full document before being changed if it is available.
-     *
-     * @return this.
-     * @since 4.0
-     * @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange)
-     */
-    public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() {
-      return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE);
-    }
-
     /**
      * Set the cluster time to resume from.
      *
@@ -372,7 +330,7 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) {
 
-      Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
+      Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
 
       this.resumeTimestamp = resumeTimestamp;
       return this;
@@ -387,7 +345,7 @@ public class ChangeStreamOptions {
      */
     public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
 
-      Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null");
+      Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
 
       this.resumeTimestamp = resumeTimestamp;
       return this;
@@ -433,7 +391,6 @@ public class ChangeStreamOptions {
     options.filter = this.filter;
     options.resumeToken = this.resumeToken;
     options.fullDocumentLookup = this.fullDocumentLookup;
-    options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup;
     options.collation = this.collation;
     options.resumeTimestamp = this.resumeTimestamp;
     options.resume = this.resume;
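
A minimal sketch of how the builder methods touched above are typically combined (assuming the side of the diff that still has returnFullDocumentBeforeChange(), and that ChangeStreamOptions.builder() is used as the entry point):

    import org.springframework.data.mongodb.core.ChangeStreamOptions;
    import org.springframework.data.mongodb.core.query.Collation;
    import com.mongodb.client.model.changestream.FullDocument;

    ChangeStreamOptions options = ChangeStreamOptions.builder()
        .collation(Collation.of("en_US"))
        .fullDocumentLookup(FullDocument.UPDATE_LOOKUP)
        .returnFullDocumentBeforeChange()
        .build();
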
@@ -47,11 +47,23 @@ public class CollectionOptions {
   private @Nullable Collation collation;
   private ValidationOptions validationOptions;
   private @Nullable TimeSeriesOptions timeSeriesOptions;
-  private @Nullable CollectionChangeStreamOptions changeStreamOptions;
+
+  /**
+   * Constructs a new <code>CollectionOptions</code> instance.
+   *
+   * @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}.
+   * @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}.
+   * @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order),
+   *          false otherwise. Can be {@literal null}.
+   * @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point.
+   */
+  @Deprecated
+  public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
+    this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
+  }
 
   private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
-      @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions,
-      @Nullable CollectionChangeStreamOptions changeStreamOptions) {
+      @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions) {
 
     this.maxDocuments = maxDocuments;
     this.size = size;
@@ -59,7 +71,6 @@ public class CollectionOptions {
     this.collation = collation;
     this.validationOptions = validationOptions;
     this.timeSeriesOptions = timeSeriesOptions;
-    this.changeStreamOptions = changeStreamOptions;
   }
 
   /**
@@ -71,9 +82,9 @@ public class CollectionOptions {
   */
   public static CollectionOptions just(Collation collation) {
 
-    Assert.notNull(collation, "Collation must not be null");
+    Assert.notNull(collation, "Collation must not be null!");
 
-    return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null);
+    return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
   }
 
   /**
@@ -83,7 +94,7 @@ public class CollectionOptions {
   * @since 2.0
   */
   public static CollectionOptions empty() {
-    return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null);
+    return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
   }
 
   /**
@@ -100,28 +111,15 @@ public class CollectionOptions {
     return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
   }
 
-  /**
-   * Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events.
-   *
-   * @return new instance of {@link CollectionOptions}.
-   * @see #changeStream(CollectionChangeStreamOptions)
-   * @see CollectionChangeStreamOptions#preAndPostImages(boolean)
-   * @since 4.0
-   */
-  public static CollectionOptions emitChangedRevisions() {
-    return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true));
-  }
-
   /**
   * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}. <br />
-  * <strong>NOTE:</strong> Using capped collections requires defining {@link #size(long)}.
+  * <strong>NOTE</strong> Using capped collections requires defining {@link #size(long)}.
   *
   * @return new {@link CollectionOptions}.
   * @since 2.0
   */
   public CollectionOptions capped() {
-    return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
   }
 
   /**
@@ -132,8 +130,7 @@ public class CollectionOptions {
   * @since 2.0
   */
   public CollectionOptions maxDocuments(long maxDocuments) {
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
   }
 
   /**
@@ -144,8 +141,7 @@ public class CollectionOptions {
   * @since 2.0
   */
   public CollectionOptions size(long size) {
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
   }
 
   /**
@@ -156,8 +152,7 @@ public class CollectionOptions {
   * @since 2.0
   */
   public CollectionOptions collation(@Nullable Collation collation) {
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
   }
 
   /**
@@ -249,7 +244,7 @@ public class CollectionOptions {
   */
   public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) {
 
-    Assert.notNull(validationLevel, "ValidationLevel must not be null");
+    Assert.notNull(validationLevel, "ValidationLevel must not be null!");
     return validation(validationOptions.validationLevel(validationLevel));
   }
 
@@ -263,7 +258,7 @@ public class CollectionOptions {
   */
   public CollectionOptions schemaValidationAction(ValidationAction validationAction) {
 
-    Assert.notNull(validationAction, "ValidationAction must not be null");
+    Assert.notNull(validationAction, "ValidationAction must not be null!");
     return validation(validationOptions.validationAction(validationAction));
   }
 
@@ -276,9 +271,8 @@ public class CollectionOptions {
   */
   public CollectionOptions validation(ValidationOptions validationOptions) {
 
-    Assert.notNull(validationOptions, "ValidationOptions must not be null");
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    Assert.notNull(validationOptions, "ValidationOptions must not be null!");
+    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
   }
 
   /**
@@ -290,23 +284,8 @@ public class CollectionOptions {
   */
   public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
 
-    Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null");
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
-  }
-
-  /**
-   * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
-   *
-   * @param changeStreamOptions must not be {@literal null}.
-   * @return new instance of {@link CollectionOptions}.
-   * @since 3.3
-   */
-  public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) {
-
-    Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null");
-    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions,
-        changeStreamOptions);
+    Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
+    return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
   }
 
   /**
@@ -367,21 +346,11 @@ public class CollectionOptions {
     return Optional.ofNullable(timeSeriesOptions);
   }
 
-  /**
-   * Get the {@link CollectionChangeStreamOptions} if available.
-   *
-   * @return {@link Optional#empty()} if not specified.
-   * @since 4.0
-   */
-  public Optional<CollectionChangeStreamOptions> getChangeStreamOptions() {
-    return Optional.ofNullable(changeStreamOptions);
-  }
-
   @Override
   public String toString() {
     return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped
         + ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions="
-        + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", disableValidation="
+        + timeSeriesOptions + ", disableValidation="
         + disableValidation() + ", strictValidation=" + strictValidation() + ", moderateValidation="
         + moderateValidation() + ", warnOnValidationError=" + warnOnValidationError() + ", failOnValidationError="
         + failOnValidationError() + '}';
@@ -413,10 +382,7 @@ public class CollectionOptions {
     if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) {
       return false;
     }
-    if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) {
-      return false;
-    }
-    return ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions);
+    return ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions);
   }
 
   @Override
@@ -427,7 +393,6 @@ public class CollectionOptions {
     result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
     result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions);
     result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions);
-    result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions);
     return result;
   }
 
@@ -561,58 +526,6 @@ public class CollectionOptions {
     }
   }
 
-  /**
-   * Encapsulation of options applied to define collections change stream behaviour.
-   *
-   * @author Christoph Strobl
-   * @since 4.0
-   */
-  public static class CollectionChangeStreamOptions {
-
-    private final boolean preAndPostImages;
-
-    private CollectionChangeStreamOptions(boolean emitChangedRevisions) {
-      this.preAndPostImages = emitChangedRevisions;
-    }
-
-    /**
-     * Output the version of a document before and after changes (the document pre- and post-images).
-     *
-     * @return new instance of {@link CollectionChangeStreamOptions}.
-     */
-    public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) {
-      return new CollectionChangeStreamOptions(true);
-    }
-
-    public boolean getPreAndPostImages() {
-      return preAndPostImages;
-    }
-
-    @Override
-    public String toString() {
-      return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}';
-    }
-
-    @Override
-    public boolean equals(@Nullable Object o) {
-      if (this == o) {
-        return true;
-      }
-      if (o == null || getClass() != o.getClass()) {
-        return false;
-      }
-
-      CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o;
-
-      return preAndPostImages == that.preAndPostImages;
-    }
-
-    @Override
-    public int hashCode() {
-      return (preAndPostImages ? 1 : 0);
-    }
-  }
-
   /**
   * Options applicable to Time Series collections.
   *
@@ -631,7 +544,7 @@ public class CollectionOptions {
 
   private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
 
-    Assert.hasText(timeField, "Time field must not be empty or null");
+    Assert.hasText(timeField, "Time field must not be empty or null!");
 
     this.timeField = timeField;
     this.metaField = metaField;
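
A short usage sketch for the options above, assuming an existing MongoTemplate named "template" and a placeholder Person entity:

    import org.springframework.data.mongodb.core.CollectionOptions;
    import org.springframework.data.mongodb.core.query.Collation;

    // Capped collections require an explicit size.
    template.createCollection(Person.class,
        CollectionOptions.empty().capped().size(1024 * 1024).collation(Collation.of("en_US")));

    // Only on the side of the diff that still has the 4.0 additions:
    // request pre- and post-images for change streams.
    template.createCollection(Person.class, CollectionOptions.emitChangedRevisions());
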
@@ -1,61 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import org.springframework.util.Assert;
-
-import com.mongodb.client.MongoCollection;
-
-/**
- * Interface for functional preparation of a {@link MongoCollection}.
- *
- * @author Mark Paluch
- * @since 4.1
- */
-public interface CollectionPreparer<T> {
-
-  /**
-   * Returns a preparer that always returns its input collection.
-   *
-   * @return a preparer that always returns its input collection.
-   */
-  static <T> CollectionPreparer<T> identity() {
-    return it -> it;
-  }
-
-  /**
-   * Prepare the {@code collection}.
-   *
-   * @param collection the collection to prepare.
-   * @return the prepared collection.
-   */
-  T prepare(T collection);
-
-  /**
-   * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies
-   * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to
-   * the caller of the composed function.
-   *
-   * @param after the collection preparer to apply after this function is applied.
-   * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after}
-   *         preparer.
-   */
-  default CollectionPreparer<T> andThen(CollectionPreparer<T> after) {
-    Assert.notNull(after, "After CollectionPreparer must not be null");
-    return c -> after.prepare(prepare(c));
-  }
-
-}
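
The deleted interface is a simple function-style hook; a sketch of composing one (only driver types and the methods shown above are used; the collection variable is assumed to exist):

    import org.bson.Document;
    import com.mongodb.ReadPreference;
    import com.mongodb.client.MongoCollection;

    CollectionPreparer<MongoCollection<Document>> preparer =
        CollectionPreparer.<MongoCollection<Document>>identity()
            .andThen(col -> col.withReadPreference(ReadPreference.secondaryPreferred()));

    // later, before issuing a query:
    MongoCollection<Document> prepared = preparer.prepare(collection);
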
@@ -1,182 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.function.BiFunction;
-import java.util.function.Function;
-
-import org.bson.Document;
-
-import com.mongodb.ReadConcern;
-import com.mongodb.ReadPreference;
-import com.mongodb.client.MongoCollection;
-
-/**
- * Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon
- * {@link CollectionPreparer preparing a collection}.
- *
- * @author Mark Paluch
- * @since 4.1
- */
-class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware {
-
-  private final List<Object> sources;
-
-  private CollectionPreparerSupport(List<Object> sources) {
-    this.sources = sources;
-  }
-
-  <T> T doPrepare(T collection, Function<T, ReadConcern> concernAccessor, BiFunction<T, ReadConcern, T> concernFunction,
-      Function<T, ReadPreference> preferenceAccessor, BiFunction<T, ReadPreference, T> preferenceFunction) {
-
-    T collectionToUse = collection;
-
-    for (Object source : sources) {
-      if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) {
-
-        ReadConcern concern = rca.getReadConcern();
-        if (concernAccessor.apply(collectionToUse) != concern) {
-          collectionToUse = concernFunction.apply(collectionToUse, concern);
-        }
-        break;
-      }
-    }
-
-    for (Object source : sources) {
-      if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
-
-        ReadPreference preference = rpa.getReadPreference();
-        if (preferenceAccessor.apply(collectionToUse) != preference) {
-          collectionToUse = preferenceFunction.apply(collectionToUse, preference);
-        }
-        break;
-      }
-    }
-
-    return collectionToUse;
-  }
-
-  @Override
-  public boolean hasReadConcern() {
-
-    for (Object aware : sources) {
-      if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  @Override
-  public ReadConcern getReadConcern() {
-
-    for (Object aware : sources) {
-      if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) {
-        return rca.getReadConcern();
-      }
-    }
-
-    return null;
-  }
-
-  @Override
-  public boolean hasReadPreference() {
-
-    for (Object aware : sources) {
-      if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  @Override
-  public ReadPreference getReadPreference() {
-
-    for (Object aware : sources) {
-      if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) {
-        return rpa.getReadPreference();
-      }
-    }
-
-    return null;
-  }
-
-  static class CollectionPreparerDelegate extends CollectionPreparerSupport
-      implements CollectionPreparer<MongoCollection<Document>> {
-
-    private CollectionPreparerDelegate(List<Object> sources) {
-      super(sources);
-    }
-
-    public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) {
-      return of((Object[]) awares);
-    }
-
-    public static CollectionPreparerDelegate of(Object... mixedAwares) {
-
-      if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
-        return (CollectionPreparerDelegate) mixedAwares[0];
-      }
-
-      return new CollectionPreparerDelegate(Arrays.asList(mixedAwares));
-    }
-
-    @Override
-    public MongoCollection<Document> prepare(MongoCollection<Document> collection) {
-      return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern,
-          MongoCollection::getReadPreference, MongoCollection::withReadPreference);
-    }
-
-  }
-
-  static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport
-      implements CollectionPreparer<com.mongodb.reactivestreams.client.MongoCollection<Document>> {
-
-    private ReactiveCollectionPreparerDelegate(List<Object> sources) {
-      super(sources);
-    }
-
-    public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) {
-      return of((Object[]) awares);
-    }
-
-    public static ReactiveCollectionPreparerDelegate of(Object... mixedAwares) {
-
-      if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) {
-        return (ReactiveCollectionPreparerDelegate) mixedAwares[0];
-      }
-
-      return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares));
-    }
-
-    @Override
-    public com.mongodb.reactivestreams.client.MongoCollection<Document> prepare(
-        com.mongodb.reactivestreams.client.MongoCollection<Document> collection) {
-      return doPrepare(collection, //
-          com.mongodb.reactivestreams.client.MongoCollection::getReadConcern,
-          com.mongodb.reactivestreams.client.MongoCollection::withReadConcern,
-          com.mongodb.reactivestreams.client.MongoCollection::getReadPreference,
-          com.mongodb.reactivestreams.client.MongoCollection::withReadPreference);
    }
-
-  }
-
-}
@@ -64,15 +64,18 @@ class CountQuery {
 
     for (Map.Entry<String, Object> entry : source.entrySet()) {
 
-      if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) {
+      if (entry.getValue() instanceof Document && requiresRewrite(entry.getValue())) {
 
-        target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and")));
+        Document theValue = (Document) entry.getValue();
+        target.putAll(createGeoWithin(entry.getKey(), theValue, source.get("$and")));
         continue;
       }
 
-      if (entry.getValue() instanceof Collection<?> collection && requiresRewrite(entry.getValue())) {
+      if (entry.getValue() instanceof Collection && requiresRewrite(entry.getValue())) {
 
-        target.put(entry.getKey(), rewriteCollection(collection));
+        Collection<?> source = (Collection<?>) entry.getValue();
+
+        target.put(entry.getKey(), rewriteCollection(source));
         continue;
       }
 
@@ -93,12 +96,12 @@ class CountQuery {
   */
   private boolean requiresRewrite(Object valueToInspect) {
 
-    if (valueToInspect instanceof Document document) {
-      return requiresRewrite(document);
+    if (valueToInspect instanceof Document) {
+      return requiresRewrite((Document) valueToInspect);
     }
 
-    if (valueToInspect instanceof Collection<?> collection) {
-      return requiresRewrite(collection);
+    if (valueToInspect instanceof Collection) {
+      return requiresRewrite((Collection<?>) valueToInspect);
     }
 
     return false;
@@ -107,7 +110,7 @@ class CountQuery {
   private boolean requiresRewrite(Collection<?> collection) {
 
     for (Object o : collection) {
-      if (o instanceof Document document && requiresRewrite(document)) {
+      if (o instanceof Document && requiresRewrite((Document) o)) {
         return true;
       }
     }
@@ -136,8 +139,8 @@ class CountQuery {
     Collection<Object> rewrittenCollection = new ArrayList<>(source.size());
 
     for (Object item : source) {
-      if (item instanceof Document document && requiresRewrite(item)) {
-        rewrittenCollection.add(CountQuery.of(document).toQueryDocument());
+      if (item instanceof Document && requiresRewrite(item)) {
+        rewrittenCollection.add(CountQuery.of((Document) item).toQueryDocument());
       } else {
         rewrittenCollection.add(item);
       }
@@ -184,7 +187,7 @@ class CountQuery {
       criteria.addAll(andElements);
     } else {
       throw new IllegalArgumentException(
-          "Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: "
+          "Cannot rewrite query as it contains an '$and' element that is not a Collection!: Offending element: "
              + $and);
     }
   } else {
@@ -205,7 +208,9 @@ class CountQuery {
     return (Number) source.get("$maxDistance");
   }
 
-  if ($near instanceof Document nearDoc) {
+  if ($near instanceof Document) {
+
+    Document nearDoc = (Document) $near;
 
     if (nearDoc.containsKey("$maxDistance")) {
 
@@ -239,11 +244,12 @@ class CountQuery {
     return value;
   }
 
-  if (value instanceof Point point) {
-    return Arrays.asList(point.getX(), point.getY());
+  if (value instanceof Point) {
+    return Arrays.asList(((Point) value).getX(), ((Point) value).getY());
   }
 
-  if (value instanceof Document document) {
+  if (value instanceof Document) {
+    Document document = (Document) value;
 
     if (document.containsKey("x")) {
       return Arrays.asList(document.get("x"), document.get("y"));
@@ -61,8 +61,8 @@ public interface CursorPreparer extends ReadPreferenceAware {
   default FindIterable<Document> initiateFind(MongoCollection<Document> collection,
       Function<MongoCollection<Document>, FindIterable<Document>> find) {
 
-    Assert.notNull(collection, "Collection must not be null");
-    Assert.notNull(find, "Find function must not be null");
+    Assert.notNull(collection, "Collection must not be null!");
+    Assert.notNull(find, "Find function must not be null!");
 
     if (hasReadPreference()) {
       collection = collection.withReadPreference(getReadPreference());
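
For context, initiateFind applies the preparer's read preference before delegating to the supplied find function. A hedged sketch, assuming an existing CursorPreparer "preparer" and MongoCollection<Document> "collection":

    import org.bson.Document;
    import com.mongodb.client.FindIterable;
    import com.mongodb.client.MongoCollection;

    FindIterable<Document> iterable =
        preparer.initiateFind(collection, col -> col.find(new Document("lastname", "smith")));
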
@@ -16,47 +16,42 @@
 package org.springframework.data.mongodb.core;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
 
 import org.bson.Document;
-import org.springframework.context.ApplicationEvent;
+import org.bson.conversions.Bson;
 import org.springframework.context.ApplicationEventPublisher;
 import org.springframework.dao.DataIntegrityViolationException;
-import org.springframework.data.mapping.callback.EntityCallback;
 import org.springframework.data.mapping.callback.EntityCallbacks;
 import org.springframework.data.mongodb.BulkOperationException;
 import org.springframework.data.mongodb.core.convert.QueryMapper;
 import org.springframework.data.mongodb.core.convert.UpdateMapper;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback;
+import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
 import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
 import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
 import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
+import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
 import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
 import org.springframework.data.mongodb.core.query.Collation;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.query.Update;
 import org.springframework.data.mongodb.core.query.UpdateDefinition;
+import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
 import org.springframework.data.util.Pair;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
+import org.springframework.util.ObjectUtils;
 
 import com.mongodb.MongoBulkWriteException;
 import com.mongodb.WriteConcern;
 import com.mongodb.bulk.BulkWriteResult;
 import com.mongodb.client.MongoCollection;
-import com.mongodb.client.model.BulkWriteOptions;
-import com.mongodb.client.model.DeleteManyModel;
-import com.mongodb.client.model.DeleteOptions;
-import com.mongodb.client.model.InsertOneModel;
-import com.mongodb.client.model.ReplaceOneModel;
-import com.mongodb.client.model.ReplaceOptions;
-import com.mongodb.client.model.UpdateManyModel;
-import com.mongodb.client.model.UpdateOneModel;
-import com.mongodb.client.model.UpdateOptions;
-import com.mongodb.client.model.WriteModel;
+import com.mongodb.client.model.*;
 
 /**
  * Default implementation for {@link BulkOperations}.
@@ -72,7 +67,7 @@ import com.mongodb.client.model.WriteModel;
 * @author Jacob Botuck
 * @since 1.9
 */
-class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations {
+class DefaultBulkOperations implements BulkOperations {
 
   private final MongoOperations mongoOperations;
   private final String collectionName;
@@ -80,6 +75,7 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
   private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
 
   private @Nullable WriteConcern defaultWriteConcern;
 
   private BulkWriteOptions bulkOptions;
 
   /**
@@ -94,15 +90,14 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
   DefaultBulkOperations(MongoOperations mongoOperations, String collectionName,
       BulkOperationContext bulkOperationContext) {
 
-    super(collectionName);
-    Assert.notNull(mongoOperations, "MongoOperations must not be null");
-    Assert.hasText(collectionName, "CollectionName must not be null nor empty");
-    Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
+    Assert.notNull(mongoOperations, "MongoOperations must not be null!");
+    Assert.hasText(collectionName, "CollectionName must not be null nor empty!");
+    Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null!");
 
     this.mongoOperations = mongoOperations;
     this.collectionName = collectionName;
     this.bulkOperationContext = bulkOperationContext;
-    this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
+    this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
   }
 
   /**
@@ -114,10 +109,14 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
     this.defaultWriteConcern = defaultWriteConcern;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object)
+   */
   @Override
   public BulkOperations insert(Object document) {
 
-    Assert.notNull(document, "Document must not be null");
+    Assert.notNull(document, "Document must not be null!");
 
     maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
     Object source = maybeInvokeBeforeConvertCallback(document);
@@ -126,65 +125,93 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List)
+   */
   @Override
   public BulkOperations insert(List<? extends Object> documents) {
 
-    Assert.notNull(documents, "Documents must not be null");
+    Assert.notNull(documents, "Documents must not be null!");
 
     documents.forEach(this::insert);
 
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
+   */
   @Override
-  public BulkOperations updateOne(Query query, UpdateDefinition update) {
+  @SuppressWarnings("unchecked")
+  public BulkOperations updateOne(Query query, Update update) {
 
-    Assert.notNull(query, "Query must not be null");
-    Assert.notNull(update, "Update must not be null");
+    Assert.notNull(query, "Query must not be null!");
+    Assert.notNull(update, "Update must not be null!");
 
-    return update(query, update, false, false);
+    return updateOne(Collections.singletonList(Pair.of(query, update)));
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List)
+   */
   @Override
-  public BulkOperations updateOne(List<Pair<Query, UpdateDefinition>> updates) {
+  public BulkOperations updateOne(List<Pair<Query, Update>> updates) {
 
-    Assert.notNull(updates, "Updates must not be null");
+    Assert.notNull(updates, "Updates must not be null!");
 
-    for (Pair<Query, UpdateDefinition> update : updates) {
+    for (Pair<Query, Update> update : updates) {
       update(update.getFirst(), update.getSecond(), false, false);
     }
 
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
+   */
   @Override
-  public BulkOperations updateMulti(Query query, UpdateDefinition update) {
+  @SuppressWarnings("unchecked")
+  public BulkOperations updateMulti(Query query, Update update) {
 
-    Assert.notNull(query, "Query must not be null");
-    Assert.notNull(update, "Update must not be null");
+    Assert.notNull(query, "Query must not be null!");
+    Assert.notNull(update, "Update must not be null!");
 
-    update(query, update, false, true);
-
-    return this;
+    return updateMulti(Collections.singletonList(Pair.of(query, update)));
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List)
+   */
   @Override
-  public BulkOperations updateMulti(List<Pair<Query, UpdateDefinition>> updates) {
+  public BulkOperations updateMulti(List<Pair<Query, Update>> updates) {
 
-    Assert.notNull(updates, "Updates must not be null");
+    Assert.notNull(updates, "Updates must not be null!");
 
-    for (Pair<Query, UpdateDefinition> update : updates) {
+    for (Pair<Query, Update> update : updates) {
       update(update.getFirst(), update.getSecond(), false, true);
     }
 
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update)
+   */
   @Override
-  public BulkOperations upsert(Query query, UpdateDefinition update) {
+  public BulkOperations upsert(Query query, Update update) {
     return update(query, update, true, true);
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List)
+   */
   @Override
   public BulkOperations upsert(List<Pair<Query, Update>> updates) {
 
@@ -195,10 +222,14 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query)
+   */
   @Override
   public BulkOperations remove(Query query) {
 
-    Assert.notNull(query, "Query must not be null");
+    Assert.notNull(query, "Query must not be null!");
 
     DeleteOptions deleteOptions = new DeleteOptions();
     query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
@@ -208,10 +239,14 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
     return this;
   }
 
+  /*
+   * (non-Javadoc)
+   * @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List)
+   */
   @Override
   public BulkOperations remove(List<Query> removes) {
 
-    Assert.notNull(removes, "Removals must not be null");
+    Assert.notNull(removes, "Removals must not be null!");
 
     for (Query query : removes) {
|
for (Query query : removes) {
|
||||||
remove(query);
|
remove(query);
|
||||||
@@ -220,12 +255,16 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||||
|
|
||||||
Assert.notNull(query, "Query must not be null");
|
Assert.notNull(query, "Query must not be null!");
|
||||||
Assert.notNull(replacement, "Replacement must not be null");
|
Assert.notNull(replacement, "Replacement must not be null!");
|
||||||
Assert.notNull(options, "Options must not be null");
|
Assert.notNull(options, "Options must not be null!");
|
||||||
|
|
||||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||||
replaceOptions.upsert(options.isUpsert());
|
replaceOptions.upsert(options.isUpsert());
|
||||||
@@ -239,6 +278,10 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public com.mongodb.bulk.BulkWriteResult execute() {
|
public com.mongodb.bulk.BulkWriteResult execute() {
|
||||||
|
|
||||||
@@ -246,14 +289,14 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
|
|
||||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||||
|
|
||||||
Assert.state(result != null, "Result must not be null");
|
Assert.state(result != null, "Result must not be null.");
|
||||||
|
|
||||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||||
models.forEach(this::maybeInvokeAfterSaveCallback);
|
models.forEach(this::maybeInvokeAfterSaveCallback);
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
} finally {
|
} finally {
|
||||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
|
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -272,8 +315,9 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
bulkOptions);
|
bulkOptions);
|
||||||
} catch (RuntimeException ex) {
|
} catch (RuntimeException ex) {
|
||||||
|
|
||||||
if (ex instanceof MongoBulkWriteException mongoBulkWriteException) {
|
if (ex instanceof MongoBulkWriteException) {
|
||||||
|
|
||||||
|
MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
|
||||||
if (mongoBulkWriteException.getWriteConcernError() != null) {
|
if (mongoBulkWriteException.getWriteConcernError() != null) {
|
||||||
throw new DataIntegrityViolationException(ex.getMessage(), ex);
|
throw new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||||
}
|
}
|
||||||
@@ -288,17 +332,17 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
|
|
||||||
maybeEmitBeforeSaveEvent(it);
|
maybeEmitBeforeSaveEvent(it);
|
||||||
|
|
||||||
if (it.model() instanceof InsertOneModel<Document> model) {
|
if (it.getModel() instanceof InsertOneModel) {
|
||||||
|
|
||||||
Document target = model.getDocument();
|
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||||
maybeInvokeBeforeSaveCallback(it.source(), target);
|
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||||
} else if (it.model() instanceof ReplaceOneModel<Document> model) {
|
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||||
|
|
||||||
Document target = model.getReplacement();
|
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||||
maybeInvokeBeforeSaveCallback(it.source(), target);
|
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||||
}
|
}
|
||||||
|
|
||||||
return mapWriteModel(it.source(), it.model());
|
return mapWriteModel(it.getModel());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -310,10 +354,10 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
* @param multi whether to issue a multi-update.
|
* @param multi whether to issue a multi-update.
|
||||||
* @return the {@link BulkOperations} with the update registered.
|
* @return the {@link BulkOperations} with the update registered.
|
||||||
*/
|
*/
|
||||||
private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
|
private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) {
|
||||||
|
|
||||||
Assert.notNull(query, "Query must not be null");
|
Assert.notNull(query, "Query must not be null!");
|
||||||
Assert.notNull(update, "Update must not be null");
|
Assert.notNull(update, "Update must not be null!");
|
||||||
|
|
||||||
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
UpdateOptions options = computeUpdateOptions(query, update, upsert);
|
||||||
|
|
||||||
@@ -326,30 +370,53 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
private WriteModel<Document> mapWriteModel(WriteModel<Document> writeModel) {
|
||||||
protected void maybeEmitEvent(ApplicationEvent event) {
|
|
||||||
bulkOperationContext.publishEvent(event);
|
if (writeModel instanceof UpdateOneModel) {
|
||||||
|
|
||||||
|
UpdateOneModel<Document> model = (UpdateOneModel<Document>) writeModel;
|
||||||
|
|
||||||
|
return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||||
|
model.getOptions());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (writeModel instanceof UpdateManyModel) {
|
||||||
|
|
||||||
|
UpdateManyModel<Document> model = (UpdateManyModel<Document>) writeModel;
|
||||||
|
|
||||||
|
return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()),
|
||||||
|
model.getOptions());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (writeModel instanceof DeleteOneModel) {
|
||||||
|
|
||||||
|
DeleteOneModel<Document> model = (DeleteOneModel<Document>) writeModel;
|
||||||
|
|
||||||
|
return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||||
|
}
|
||||||
|
|
||||||
|
if (writeModel instanceof DeleteManyModel) {
|
||||||
|
|
||||||
|
DeleteManyModel<Document> model = (DeleteManyModel<Document>) writeModel;
|
||||||
|
|
||||||
|
return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions());
|
||||||
|
}
|
||||||
|
|
||||||
|
return writeModel;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
private Bson getMappedUpdate(Bson update) {
|
||||||
protected UpdateMapper updateMapper() {
|
return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity());
|
||||||
return bulkOperationContext.updateMapper();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
private Bson getMappedQuery(Bson query) {
|
||||||
protected QueryMapper queryMapper() {
|
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||||
return bulkOperationContext.queryMapper();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected Optional<? extends MongoPersistentEntity<?>> entity() {
|
|
||||||
return bulkOperationContext.entity();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private Document getMappedObject(Object source) {
|
private Document getMappedObject(Object source) {
|
||||||
|
|
||||||
if (source instanceof Document document) {
|
if (source instanceof Document) {
|
||||||
return document;
|
return (Document) source;
|
||||||
}
|
}
|
||||||
|
|
||||||
Document sink = new Document();
|
Document sink = new Document();
|
||||||
@@ -362,83 +429,268 @@ class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperati
|
|||||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||||
}
|
}
|
||||||
|
|
||||||
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||||
|
|
||||||
if (holder.model() instanceof InsertOneModel<Document> model) {
|
if (holder.getModel() instanceof InsertOneModel) {
|
||||||
|
|
||||||
Document target = model.getDocument();
|
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||||
maybeInvokeAfterSaveCallback(holder.source(), target);
|
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||||
} else if (holder.model() instanceof ReplaceOneModel<Document> model) {
|
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||||
|
|
||||||
Document target = model.getReplacement();
|
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||||
maybeInvokeAfterSaveCallback(holder.source(), target);
|
maybeEmitEvent(new BeforeSaveEvent<>(holder.getSource(), target, collectionName));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private void publishEvent(MongoMappingEvent<?> event) {
|
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) {
|
||||||
bulkOperationContext.publishEvent(event);
|
|
||||||
|
if (holder.getModel() instanceof InsertOneModel) {
|
||||||
|
|
||||||
|
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||||
|
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||||
|
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||||
|
|
||||||
|
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||||
|
maybeEmitEvent(new AfterSaveEvent<>(holder.getSource(), target, collectionName));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
|
||||||
|
|
||||||
|
if (holder.getModel() instanceof InsertOneModel) {
|
||||||
|
|
||||||
|
Document target = ((InsertOneModel<Document>) holder.getModel()).getDocument();
|
||||||
|
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||||
|
} else if (holder.getModel() instanceof ReplaceOneModel) {
|
||||||
|
|
||||||
|
Document target = ((ReplaceOneModel<Document>) holder.getModel()).getReplacement();
|
||||||
|
maybeInvokeAfterSaveCallback(holder.getSource(), target);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||||
|
|
||||||
|
if (bulkOperationContext.getEventPublisher() == null) {
|
||||||
|
return event;
|
||||||
|
}
|
||||||
|
|
||||||
|
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||||
|
return event;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||||
return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName);
|
|
||||||
|
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||||
}
|
}
|
||||||
|
|
||||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||||
return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName);
|
|
||||||
|
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||||
|
collectionName);
|
||||||
}
|
}
|
||||||
|
|
||||||
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
|
||||||
return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName);
|
|
||||||
|
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return bulkOperationContext.getEntityCallbacks().callback(AfterSaveCallback.class, value, mappedDocument,
|
||||||
|
collectionName);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||||
|
|
||||||
|
BulkWriteOptions options = new BulkWriteOptions();
|
||||||
|
|
||||||
|
switch (bulkMode) {
|
||||||
|
case ORDERED:
|
||||||
|
return options.ordered(true);
|
||||||
|
case UNORDERED:
|
||||||
|
return options.ordered(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new IllegalStateException("BulkMode was null!");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to
|
* @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}.
|
||||||
|
* @param update The {@link Update} to apply
|
||||||
|
* @param upsert flag to indicate if document should be upserted.
|
||||||
|
* @return new instance of {@link UpdateOptions}.
|
||||||
|
*/
|
||||||
|
private static UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert) {
|
||||||
|
|
||||||
|
UpdateOptions options = new UpdateOptions();
|
||||||
|
options.upsert(upsert);
|
||||||
|
|
||||||
|
if (update.hasArrayFilters()) {
|
||||||
|
List<Document> list = new ArrayList<>(update.getArrayFilters().size());
|
||||||
|
for (ArrayFilter arrayFilter : update.getArrayFilters()) {
|
||||||
|
list.add(arrayFilter.asDocument());
|
||||||
|
}
|
||||||
|
options.arrayFilters(list);
|
||||||
|
}
|
||||||
|
|
||||||
|
filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||||
|
return options;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@link BulkOperationContext} holds information about
|
||||||
|
* {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to
|
||||||
* {@link QueryMapper} and {@link UpdateMapper}.
|
* {@link QueryMapper} and {@link UpdateMapper}.
|
||||||
*
|
*
|
||||||
* @author Christoph Strobl
|
* @author Christoph Strobl
|
||||||
* @since 2.0
|
* @since 2.0
|
||||||
*/
|
*/
|
||||||
record BulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
static final class BulkOperationContext {
|
||||||
QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
|
|
||||||
@Nullable EntityCallbacks entityCallbacks) {
|
|
||||||
|
|
||||||
public boolean skipEntityCallbacks() {
|
private final BulkMode bulkMode;
|
||||||
return entityCallbacks == null;
|
private final Optional<? extends MongoPersistentEntity<?>> entity;
|
||||||
|
private final QueryMapper queryMapper;
|
||||||
|
private final UpdateMapper updateMapper;
|
||||||
|
private final ApplicationEventPublisher eventPublisher;
|
||||||
|
private final EntityCallbacks entityCallbacks;
|
||||||
|
|
||||||
|
BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||||
|
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
|
||||||
|
EntityCallbacks entityCallbacks) {
|
||||||
|
|
||||||
|
this.bulkMode = bulkMode;
|
||||||
|
this.entity = entity;
|
||||||
|
this.queryMapper = queryMapper;
|
||||||
|
this.updateMapper = updateMapper;
|
||||||
|
this.eventPublisher = eventPublisher;
|
||||||
|
this.entityCallbacks = entityCallbacks;
|
||||||
}
|
}
|
||||||
|
|
||||||
public boolean skipEventPublishing() {
|
public BulkMode getBulkMode() {
|
||||||
return eventPublisher == null;
|
return this.bulkMode;
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("rawtypes")
|
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
|
||||||
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
|
return this.entity;
|
||||||
|
}
|
||||||
|
|
||||||
if (skipEntityCallbacks()) {
|
public QueryMapper getQueryMapper() {
|
||||||
return entity;
|
return this.queryMapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
public UpdateMapper getUpdateMapper() {
|
||||||
|
return this.updateMapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
public ApplicationEventPublisher getEventPublisher() {
|
||||||
|
return this.eventPublisher;
|
||||||
|
}
|
||||||
|
|
||||||
|
public EntityCallbacks getEntityCallbacks() {
|
||||||
|
return this.entityCallbacks;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(@Nullable Object o) {
|
||||||
|
if (this == o)
|
||||||
|
return true;
|
||||||
|
if (o == null || getClass() != o.getClass())
|
||||||
|
return false;
|
||||||
|
|
||||||
|
BulkOperationContext that = (BulkOperationContext) o;
|
||||||
|
|
||||||
|
if (bulkMode != that.bulkMode)
|
||||||
|
return false;
|
||||||
|
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
|
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
|
||||||
return entityCallbacks.callback(callbackType, entity, collectionName);
|
return false;
|
||||||
|
}
|
||||||
|
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("rawtypes")
|
@Override
|
||||||
public <T> T callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
|
public int hashCode() {
|
||||||
String collectionName) {
|
int result = bulkMode != null ? bulkMode.hashCode() : 0;
|
||||||
|
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
|
||||||
if (skipEntityCallbacks()) {
|
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
|
||||||
return entity;
|
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
|
||||||
}
|
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
|
||||||
|
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
|
||||||
return entityCallbacks.callback(callbackType, entity, document, collectionName);
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
public void publishEvent(ApplicationEvent event) {
|
public String toString() {
|
||||||
|
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
|
||||||
if (skipEventPublishing()) {
|
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
|
||||||
return;
|
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
|
||||||
}
|
|
||||||
|
|
||||||
eventPublisher.publishEvent(event);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||||
|
*
|
||||||
|
* @since 2.2
|
||||||
|
* @author Christoph Strobl
|
||||||
|
*/
|
||||||
|
private static final class SourceAwareWriteModelHolder {
|
||||||
|
|
||||||
|
private final Object source;
|
||||||
|
private final WriteModel<Document> model;
|
||||||
|
|
||||||
|
SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||||
|
|
||||||
|
this.source = source;
|
||||||
|
this.model = model;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object getSource() {
|
||||||
|
return this.source;
|
||||||
|
}
|
||||||
|
|
||||||
|
public WriteModel<Document> getModel() {
|
||||||
|
return this.model;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(@Nullable Object o) {
|
||||||
|
if (this == o)
|
||||||
|
return true;
|
||||||
|
if (o == null || getClass() != o.getClass())
|
||||||
|
return false;
|
||||||
|
|
||||||
|
SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;
|
||||||
|
|
||||||
|
if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return ObjectUtils.nullSafeEquals(this.model, that.model);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
int result = ObjectUtils.nullSafeHashCode(model);
|
||||||
|
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
|
||||||
|
+ this.getModel() + ")";
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
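The hunks above swap the `UpdateDefinition`-typed bulk API against the older `Update`-typed one and replace the record-based `BulkOperationContext` with a plain class. As a reading aid only, here is a minimal, hypothetical usage sketch of the `BulkOperations` fluent API touched by this diff; the `MongoTemplate` instance, the `people` collection and the field names are assumptions, not part of the change set. Because `Update` implements `UpdateDefinition`, the same calls compile against either side of the diff.

import org.springframework.data.mongodb.core.BulkOperations;
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

class BulkOperationsUsageSketch {

	// Registers one insert, one single-document update and one upsert,
	// then flushes everything in a single unordered bulk write.
	BulkWriteResult bulkUpdate(MongoTemplate template) {

		BulkOperations bulkOps = template.bulkOps(BulkMode.UNORDERED, "people"); // hypothetical collection

		bulkOps.insert(new org.bson.Document("firstName", "Ada"));
		bulkOps.updateOne(Query.query(Criteria.where("firstName").is("Ada")),
				Update.update("lastName", "Lovelace"));
		bulkOps.upsert(Query.query(Criteria.where("firstName").is("Alan")),
				Update.update("lastName", "Turing"));

		return bulkOps.execute(); // each registered model is run through QueryMapper/UpdateMapper before the write
	}
}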
@@ -22,7 +22,6 @@ import java.util.List;
 import org.bson.Document;
 import org.springframework.dao.DataAccessException;
 import org.springframework.data.mongodb.MongoDatabaseFactory;
-import org.springframework.data.mongodb.UncategorizedMongoDbException;
 import org.springframework.data.mongodb.core.convert.QueryMapper;
 import org.springframework.data.mongodb.core.index.IndexDefinition;
 import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.IndexOperations;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
-import org.springframework.util.NumberUtils;

 import com.mongodb.MongoException;
 import com.mongodb.client.MongoCollection;
@@ -85,9 +83,9 @@ public class DefaultIndexOperations implements IndexOperations {
 public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper,
 @Nullable Class<?> type) {

-Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null");
+Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!");
-Assert.notNull(collectionName, "Collection name can not be null");
+Assert.notNull(collectionName, "Collection name can not be null!");
-Assert.notNull(queryMapper, "QueryMapper must not be null");
+Assert.notNull(queryMapper, "QueryMapper must not be null!");

 this.collectionName = collectionName;
 this.mapper = queryMapper;
@@ -105,8 +103,8 @@ public class DefaultIndexOperations implements IndexOperations {
 */
 public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class<?> type) {

-Assert.notNull(mongoOperations, "MongoOperations must not be null");
+Assert.notNull(mongoOperations, "MongoOperations must not be null!");
-Assert.hasText(collectionName, "Collection name must not be null or empty");
+Assert.hasText(collectionName, "Collection name must not be null or empty!");

 this.mongoOperations = mongoOperations;
 this.mapper = new QueryMapper(mongoOperations.getConverter());
@@ -114,6 +112,10 @@ public class DefaultIndexOperations implements IndexOperations {
 this.type = type;
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
+ */
 public String ensureIndex(final IndexDefinition indexDefinition) {

 return execute(collection -> {
@@ -148,6 +150,10 @@ public class DefaultIndexOperations implements IndexOperations {
 return null;
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String)
+ */
 public void dropIndex(final String name) {

 execute(collection -> {
@@ -157,24 +163,18 @@ public class DefaultIndexOperations implements IndexOperations {

 }

-@Override
-public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
-
-Document indexOptions = new Document("name", name);
-indexOptions.putAll(options.toDocument());
-
-Document result = mongoOperations
-.execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)));
-
-if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
-throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
-}
-}
-
+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes()
+ */
 public void dropAllIndexes() {
 dropIndex("*");
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo()
+ */
 public List<IndexInfo> getIndexInfo() {

 return execute(new CollectionCallback<List<IndexInfo>>() {
@@ -206,7 +206,7 @@ public class DefaultIndexOperations implements IndexOperations {
 @Nullable
 public <T> T execute(CollectionCallback<T> callback) {

-Assert.notNull(callback, "CollectionCallback must not be null");
+Assert.notNull(callback, "CollectionCallback must not be null!");

 if (type != null) {
 return mongoOperations.execute(type, callback);
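The `alterIndex` method removed in the hunk above drives MongoDB's `collMod` command. Purely as context, a hedged sketch of issuing the same command directly through `MongoOperations#execute`; the collection name, index name and TTL value are invented, and error handling is reduced to the `ok` check used in the removed code.

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoOperations;

class CollModSketch {

	// Mirrors the removed alterIndex(...) logic: wrap the new index options in a
	// "collMod" command and verify the server answered with ok: 1.
	void changeIndexExpiry(MongoOperations operations) {

		Document indexOptions = new Document("name", "expire_idx") // assumed index name
				.append("expireAfterSeconds", 3600L);

		Document result = operations.execute(db ->
				db.runCommand(new Document("collMod", "people").append("index", indexOptions)));

		if (result == null || result.get("ok", (Number) 0).intValue() != 1) {
			throw new IllegalStateException("collMod failed: " + (result != null ? result.toJson() : "no result"));
		}
	}
}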
@@ -42,6 +42,10 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
 this.mapper = mapper;
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
+ */
 @Override
 public IndexOperations indexOps(String collectionName, Class<?> type) {
 return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
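For orientation, a small sketch of how the `IndexOperations` created by this provider are typically reached and used from a template; the `people` collection and the index name are illustrative assumptions rather than anything defined in this change set.

import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.Index;
import org.springframework.data.mongodb.core.index.IndexOperations;

class IndexOpsSketch {

	// Obtains IndexOperations for a collection and ensures a simple ascending index.
	String ensureLastNameIndex(MongoTemplate template) {

		IndexOperations indexOps = template.indexOps("people"); // or template.indexOps(SomeEntity.class)

		return indexOps.ensureIndex(new Index().on("lastName", Sort.Direction.ASC).named("lastName_idx"));
	}
}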
@@ -1,390 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import org.bson.Document;
-import org.springframework.context.ApplicationEvent;
-import org.springframework.context.ApplicationEventPublisher;
-import org.springframework.data.mapping.callback.EntityCallback;
-import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
-import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
-import org.springframework.data.mongodb.core.convert.QueryMapper;
-import org.springframework.data.mongodb.core.convert.UpdateMapper;
-import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
-import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
-import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback;
-import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback;
-import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback;
-import org.springframework.data.mongodb.core.query.Collation;
-import org.springframework.data.mongodb.core.query.Query;
-import org.springframework.data.mongodb.core.query.Update;
-import org.springframework.data.mongodb.core.query.UpdateDefinition;
-import org.springframework.lang.Nullable;
-import org.springframework.util.Assert;
-
-import com.mongodb.WriteConcern;
-import com.mongodb.bulk.BulkWriteResult;
-import com.mongodb.client.model.BulkWriteOptions;
-import com.mongodb.client.model.DeleteManyModel;
-import com.mongodb.client.model.DeleteOptions;
-import com.mongodb.client.model.InsertOneModel;
-import com.mongodb.client.model.ReplaceOneModel;
-import com.mongodb.client.model.ReplaceOptions;
-import com.mongodb.client.model.UpdateManyModel;
-import com.mongodb.client.model.UpdateOneModel;
-import com.mongodb.client.model.UpdateOptions;
-import com.mongodb.reactivestreams.client.MongoCollection;
-
-/**
- * Default implementation for {@link ReactiveBulkOperations}.
- *
- * @author Christoph Strobl
- * @author Mark Paluch
- * @since 4.1
- */
-class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations {
-
-private final ReactiveMongoOperations mongoOperations;
-private final String collectionName;
-private final ReactiveBulkOperationContext bulkOperationContext;
-private final List<Mono<SourceAwareWriteModelHolder>> models = new ArrayList<>();
-
-private @Nullable WriteConcern defaultWriteConcern;
-
-private BulkWriteOptions bulkOptions;
-
-/**
- * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and
- * {@link ReactiveBulkOperationContext}.
- *
- * @param mongoOperations must not be {@literal null}.
- * @param collectionName must not be {@literal null}.
- * @param bulkOperationContext must not be {@literal null}.
- */
-DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName,
-ReactiveBulkOperationContext bulkOperationContext) {
-
-super(collectionName);
-
-Assert.notNull(mongoOperations, "MongoOperations must not be null");
-Assert.hasText(collectionName, "CollectionName must not be null nor empty");
-Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null");
-
-this.mongoOperations = mongoOperations;
-this.collectionName = collectionName;
-this.bulkOperationContext = bulkOperationContext;
-this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
-}
-
-/**
- * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
- *
- * @param defaultWriteConcern can be {@literal null}.
- */
-void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) {
-this.defaultWriteConcern = defaultWriteConcern;
-}
-
-@Override
-public ReactiveBulkOperations insert(Object document) {
-
-Assert.notNull(document, "Document must not be null");
-
-this.models.add(Mono.just(document).flatMap(it -> {
-maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
-return maybeInvokeBeforeConvertCallback(it);
-}).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it)))));
-
-return this;
-}
-
-@Override
-public ReactiveBulkOperations insert(List<? extends Object> documents) {
-
-Assert.notNull(documents, "Documents must not be null");
-
-documents.forEach(this::insert);
-
-return this;
-}
-
-@Override
-public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) {
-
-Assert.notNull(query, "Query must not be null");
-Assert.notNull(update, "Update must not be null");
-
-update(query, update, false, false);
-return this;
-}
-
-@Override
-public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) {
-
-Assert.notNull(query, "Query must not be null");
-Assert.notNull(update, "Update must not be null");
-
-update(query, update, false, true);
-return this;
-}
-
-@Override
-public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) {
-return update(query, update, true, true);
-}
-
-@Override
-public ReactiveBulkOperations remove(Query query) {
-
-Assert.notNull(query, "Query must not be null");
-
-DeleteOptions deleteOptions = new DeleteOptions();
-query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
-
-this.models.add(Mono.just(query)
-.map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions))));
-
-return this;
-}
-
-@Override
-public ReactiveBulkOperations remove(List<Query> removes) {
-
-Assert.notNull(removes, "Removals must not be null");
-
-for (Query query : removes) {
-remove(query);
-}
-
-return this;
-}
-
-@Override
-public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
-
-Assert.notNull(query, "Query must not be null");
-Assert.notNull(replacement, "Replacement must not be null");
-Assert.notNull(options, "Options must not be null");
-
-ReplaceOptions replaceOptions = new ReplaceOptions();
-replaceOptions.upsert(options.isUpsert());
-query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
-
-this.models.add(Mono.just(replacement).flatMap(it -> {
-maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName));
-return maybeInvokeBeforeConvertCallback(it);
-}).map(it -> new SourceAwareWriteModelHolder(it,
-new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(it), replaceOptions))));
-
-return this;
-}
-
-@Override
-public Mono<BulkWriteResult> execute() {
-
-try {
-return mongoOperations.execute(collectionName, this::bulkWriteTo).next();
-} finally {
-this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode());
-}
-}
-
-private Mono<BulkWriteResult> bulkWriteTo(MongoCollection<Document> collection) {
-
-if (defaultWriteConcern != null) {
-collection = collection.withWriteConcern(defaultWriteConcern);
-}
-
-Flux<SourceAwareWriteModelHolder> concat = Flux.concat(models).flatMap(it -> {
-
-if (it.model()instanceof InsertOneModel<Document> iom) {
-
-Document target = iom.getDocument();
-maybeEmitBeforeSaveEvent(it);
-return maybeInvokeBeforeSaveCallback(it.source(), target)
-.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom)));
-} else if (it.model()instanceof ReplaceOneModel<Document> rom) {
-
-Document target = rom.getReplacement();
-maybeEmitBeforeSaveEvent(it);
-return maybeInvokeBeforeSaveCallback(it.source(), target)
-.map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom)));
-}
-
-return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model())));
-});
-
-MongoCollection<Document> theCollection = collection;
-return concat.collectList().flatMap(it -> {
-
-return Mono
-.from(theCollection
-.bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions))
-.doOnSuccess(state -> {
-it.forEach(this::maybeEmitAfterSaveEvent);
-}).flatMap(state -> {
-List<Mono<Object>> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList());
-
-return Flux.concat(monos).then(Mono.just(state));
-});
-});
-}
-
-/**
- * Performs update and upsert bulk operations.
- *
- * @param query the {@link Query} to determine documents to update.
- * @param update the {@link Update} to perform, must not be {@literal null}.
- * @param upsert whether to upsert.
- * @param multi whether to issue a multi-update.
- * @return the {@link BulkOperations} with the update registered.
- */
-private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) {
-
-Assert.notNull(query, "Query must not be null");
-Assert.notNull(update, "Update must not be null");
-
-UpdateOptions options = computeUpdateOptions(query, update, upsert);
-
-this.models.add(Mono.just(update).map(it -> {
-if (multi) {
-return new SourceAwareWriteModelHolder(update,
-new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options));
-}
-return new SourceAwareWriteModelHolder(update,
-new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options));
-}));
-
-return this;
-}
-
-@Override
-protected void maybeEmitEvent(ApplicationEvent event) {
-bulkOperationContext.publishEvent(event);
-}
-
-@Override
-protected UpdateMapper updateMapper() {
-return bulkOperationContext.updateMapper();
-}
-
-@Override
-protected QueryMapper queryMapper() {
-return bulkOperationContext.queryMapper();
-}
-
-@Override
-protected Optional<? extends MongoPersistentEntity<?>> entity() {
-return bulkOperationContext.entity();
-}
-
-private Document getMappedObject(Object source) {
-
-if (source instanceof Document) {
-return (Document) source;
-}
-
-Document sink = new Document();
-
-mongoOperations.getConverter().write(source, sink);
-return sink;
-}
-
-private Mono<Object> maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) {
-
-if (holder.model() instanceof InsertOneModel) {
-
-Document target = ((InsertOneModel<Document>) holder.model()).getDocument();
-return maybeInvokeAfterSaveCallback(holder.source(), target);
-} else if (holder.model() instanceof ReplaceOneModel) {
-
-Document target = ((ReplaceOneModel<Document>) holder.model()).getReplacement();
-return maybeInvokeAfterSaveCallback(holder.source(), target);
-}
-return Mono.just(holder.source());
-}
-
-private Mono<Object> maybeInvokeBeforeConvertCallback(Object value) {
-return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName);
-}
-
-private Mono<Object> maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
-return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName);
-}
-
-private Mono<Object> maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) {
-return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName);
-}
-
-/**
- * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as
- * references to {@link QueryMapper} and {@link UpdateMapper}.
- *
- * @author Christoph Strobl
- * @since 2.0
- */
-record ReactiveBulkOperationContext(BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
-QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher,
-@Nullable ReactiveEntityCallbacks entityCallbacks) {
-
-public boolean skipEntityCallbacks() {
-return entityCallbacks == null;
-}
-
-public boolean skipEventPublishing() {
-return eventPublisher == null;
-}
-
-@SuppressWarnings("rawtypes")
-public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, String collectionName) {
-
-if (skipEntityCallbacks()) {
-return Mono.just(entity);
-}
-
-return entityCallbacks.callback(callbackType, entity, collectionName);
-}
-
-@SuppressWarnings("rawtypes")
-public <T> Mono<T> callback(Class<? extends EntityCallback> callbackType, T entity, Document document,
-String collectionName) {
-
-if (skipEntityCallbacks()) {
-return Mono.just(entity);
-}
-
-return entityCallbacks.callback(callbackType, entity, document, collectionName);
-}
-
-public void publishEvent(ApplicationEvent event) {
-
-if (skipEventPublishing()) {
-return;
-}
-
-eventPublisher.publishEvent(event);
-}
-}
-
-}
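The file removed above is the reactive counterpart of `DefaultBulkOperations`. Purely as an illustration of the API it implements, and assuming a `ReactiveMongoTemplate` that exposes `bulkOps(...)` as in the 4.1 line this class belongs to, a usage sketch (collection and field names are invented):

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.ReactiveBulkOperations;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;
import reactor.core.publisher.Mono;

class ReactiveBulkOpsSketch {

	// Nothing is sent to the server until the Mono returned by execute() is subscribed to;
	// the write models are assembled lazily, mirroring the Mono-based `models` list above.
	Mono<BulkWriteResult> promoteNewEntries(ReactiveMongoTemplate template) {

		ReactiveBulkOperations bulkOps = template.bulkOps(BulkMode.ORDERED, "people"); // assumed collection

		bulkOps.updateMulti(Query.query(Criteria.where("status").is("NEW")),
				Update.update("status", "OPEN"));

		return bulkOps.execute();
	}
}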
@@ -22,7 +22,6 @@ import java.util.Collection;
 import java.util.Optional;

 import org.bson.Document;
-import org.springframework.data.mongodb.UncategorizedMongoDbException;
 import org.springframework.data.mongodb.core.convert.QueryMapper;
 import org.springframework.data.mongodb.core.index.IndexDefinition;
 import org.springframework.data.mongodb.core.index.IndexInfo;
@@ -30,7 +29,6 @@ import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
 import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
-import org.springframework.util.NumberUtils;

 import com.mongodb.client.model.IndexOptions;

@@ -78,9 +76,9 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
 private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName,
 QueryMapper queryMapper, Optional<Class<?>> type) {

-Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null");
+Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!");
-Assert.notNull(collectionName, "Collection must not be null");
+Assert.notNull(collectionName, "Collection must not be null!");
-Assert.notNull(queryMapper, "QueryMapper must not be null");
+Assert.notNull(queryMapper, "QueryMapper must not be null!");

 this.mongoOperations = mongoOperations;
 this.collectionName = collectionName;
@@ -88,6 +86,10 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
 this.type = type;
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition)
+ */
 public Mono<String> ensureIndex(final IndexDefinition indexDefinition) {

 return mongoOperations.execute(collectionName, collection -> {
@@ -106,22 +108,6 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
 }).next();
 }

-@Override
-public Mono<Void> alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) {
-
-return mongoOperations.execute(db -> {
-Document indexOptions = new Document("name", name);
-indexOptions.putAll(options.toDocument());
-
-return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions)))
-.doOnNext(result -> {
-if(NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) {
-throw new UncategorizedMongoDbException("Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null);
-}
-});
-}).then();
-}
-
 @Nullable
 private MongoPersistentEntity<?> lookupPersistentEntity(String collection) {

@@ -133,14 +119,26 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
 .orElse(null);
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
+ */
 public Mono<Void> dropIndex(final String name) {
 return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
+ */
 public Mono<Void> dropAllIndexes() {
 return dropIndex("*");
 }

+/*
+ * (non-Javadoc)
+ * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
+ */
 public Flux<IndexInfo> getIndexInfo() {

 return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
@@ -31,6 +31,7 @@ import org.bson.types.ObjectId;
 import org.springframework.dao.DataAccessException;
 import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
 import org.springframework.data.mongodb.core.script.NamedMongoScript;
+import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.CollectionUtils;
 import org.springframework.util.ObjectUtils;
@@ -64,29 +65,41 @@ class DefaultScriptOperations implements ScriptOperations {
 	 */
 	public DefaultScriptOperations(MongoOperations mongoOperations) {

-		Assert.notNull(mongoOperations, "MongoOperations must not be null");
+		Assert.notNull(mongoOperations, "MongoOperations must not be null!");

 		this.mongoOperations = mongoOperations;
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript)
+	 */
 	@Override
 	public NamedMongoScript register(ExecutableMongoScript script) {
 		return register(new NamedMongoScript(generateScriptName(), script));
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript)
+	 */
 	@Override
 	public NamedMongoScript register(NamedMongoScript script) {

-		Assert.notNull(script, "Script must not be null");
+		Assert.notNull(script, "Script must not be null!");

 		mongoOperations.save(script, SCRIPT_COLLECTION_NAME);
 		return script;
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[])
+	 */
 	@Override
 	public Object execute(final ExecutableMongoScript script, final Object... args) {

-		Assert.notNull(script, "Script must not be null");
+		Assert.notNull(script, "Script must not be null!");

 		return mongoOperations.execute(new DbCallback<Object>() {

@@ -102,10 +115,14 @@ class DefaultScriptOperations implements ScriptOperations {
 		});
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[])
+	 */
 	@Override
 	public Object call(final String scriptName, final Object... args) {

-		Assert.hasText(scriptName, "ScriptName must not be null or empty");
+		Assert.hasText(scriptName, "ScriptName must not be null or empty!");

 		return mongoOperations.execute(new DbCallback<Object>() {

@@ -118,14 +135,22 @@ class DefaultScriptOperations implements ScriptOperations {
 		});
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String)
+	 */
 	@Override
 	public boolean exists(String scriptName) {

-		Assert.hasText(scriptName, "ScriptName must not be null or empty");
+		Assert.hasText(scriptName, "ScriptName must not be null or empty!");

 		return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME);
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames()
+	 */
 	@Override
 	public Set<String> getScriptNames() {

@@ -150,7 +175,7 @@ class DefaultScriptOperations implements ScriptOperations {
 			return args;
 		}

-		List<Object> convertedValues = new ArrayList<>(args.length);
+		List<Object> convertedValues = new ArrayList<Object>(args.length);

 		for (Object arg : args) {
 			convertedValues.add(arg instanceof String && quote ? String.format("'%s'", arg)
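For orientation, this is roughly how the `ScriptOperations` API touched above is used. It is a hedged sketch, not taken from the diff: it assumes the deprecated `MongoOperations#scriptOps()` entry point is still available on the branch you are looking at, and the JavaScript snippets are made up.

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.ScriptOperations;
import org.springframework.data.mongodb.core.script.ExecutableMongoScript;
import org.springframework.data.mongodb.core.script.NamedMongoScript;

class ScriptOperationsSketch {

	void run(MongoOperations mongoOperations) {

		ScriptOperations scripts = mongoOperations.scriptOps();

		// register an ad-hoc script under a generated name (stored server-side)
		NamedMongoScript echo = scripts.register(new ExecutableMongoScript("function(x) { return x; }"));

		// invoke it later by name ...
		Object byName = scripts.call(echo.getName(), "hello");

		// ... or execute a script directly with arguments
		Object direct = scripts.execute(new ExecutableMongoScript("function(x) { return x; }"), 42);
	}
}
```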
@@ -1,60 +0,0 @@
-/*
- * Copyright 2022-2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import org.springframework.context.ApplicationEventPublisher;
-import org.springframework.lang.Nullable;
-
-/**
- * Delegate class to encapsulate lifecycle event configuration and publishing.
- *
- * @author Mark Paluch
- * @since 4.0
- * @see ApplicationEventPublisher
- */
-class EntityLifecycleEventDelegate {
-
-	private @Nullable ApplicationEventPublisher publisher;
-	private boolean eventsEnabled = true;
-
-	public void setPublisher(@Nullable ApplicationEventPublisher publisher) {
-		this.publisher = publisher;
-	}
-
-	public boolean isEventsEnabled() {
-		return eventsEnabled;
-	}
-
-	public void setEventsEnabled(boolean eventsEnabled) {
-		this.eventsEnabled = eventsEnabled;
-	}
-
-	/**
-	 * Publish an application event if event publishing is enabled.
-	 *
-	 * @param event the application event.
-	 */
-	public void publishEvent(Object event) {
-
-		if (canPublishEvent()) {
-			publisher.publishEvent(event);
-		}
-	}
-
-	private boolean canPublishEvent() {
-		return publisher != null && eventsEnabled;
-	}
-}
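The deleted `EntityLifecycleEventDelegate` above only exists on the newer side of this compare. As a hedged usage sketch (an assumption about how callers might wire it, not code from the repository), the delegate lets event publication be suppressed wholesale:

```java
import org.springframework.context.ApplicationEventPublisher;

class LifecycleEventSketch {

	void publishIfEnabled(ApplicationEventPublisher publisher, Object event) {

		EntityLifecycleEventDelegate delegate = new EntityLifecycleEventDelegate();
		delegate.setPublisher(publisher);

		// e.g. switched off for bulk operations where per-entity events are too noisy
		delegate.setEventsEnabled(false);

		delegate.publishEvent(event); // becomes a no-op because events are disabled
	}
}
```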
@@ -17,12 +17,11 @@ package org.springframework.data.mongodb.core;

 import java.util.Collection;
 import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Optional;

-import org.bson.BsonNull;
 import org.bson.Document;

 import org.springframework.core.convert.ConversionService;
 import org.springframework.dao.InvalidDataAccessApiUsageException;
 import org.springframework.data.convert.CustomConversions;
@@ -30,8 +29,6 @@ import org.springframework.data.mapping.IdentifierAccessor;
 import org.springframework.data.mapping.MappingException;
 import org.springframework.data.mapping.PersistentEntity;
 import org.springframework.data.mapping.PersistentPropertyAccessor;
-import org.springframework.data.mapping.PersistentPropertyPath;
-import org.springframework.data.mapping.PropertyPath;
 import org.springframework.data.mapping.context.MappingContext;
 import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
 import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
@@ -48,11 +45,9 @@ import org.springframework.data.mongodb.core.query.Criteria;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.timeseries.Granularity;
 import org.springframework.data.mongodb.core.validation.Validator;
-import org.springframework.data.mongodb.util.BsonUtils;
 import org.springframework.data.projection.EntityProjection;
 import org.springframework.data.projection.EntityProjectionIntrospector;
 import org.springframework.data.projection.ProjectionFactory;
-import org.springframework.data.projection.TargetAware;
 import org.springframework.data.util.Optionals;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
@@ -62,7 +57,6 @@ import org.springframework.util.MultiValueMap;
 import org.springframework.util.ObjectUtils;
 import org.springframework.util.StringUtils;

-import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions;
 import com.mongodb.client.model.CreateCollectionOptions;
 import com.mongodb.client.model.TimeSeriesGranularity;
 import com.mongodb.client.model.ValidationOptions;
@@ -118,21 +112,17 @@ class EntityOperations {
 	@SuppressWarnings({ "unchecked", "rawtypes" })
 	<T> Entity<T> forEntity(T entity) {

-		Assert.notNull(entity, "Bean must not be null");
+		Assert.notNull(entity, "Bean must not be null!");

-		if (entity instanceof TargetAware targetAware) {
-			return new SimpleMappedEntity((Map<String, Object>) targetAware.getTarget(), this);
-		}
-
 		if (entity instanceof String) {
-			return new UnmappedEntity(parse(entity.toString()), this);
+			return new UnmappedEntity(parse(entity.toString()));
 		}

 		if (entity instanceof Map) {
-			return new SimpleMappedEntity((Map<String, Object>) entity, this);
+			return new SimpleMappedEntity((Map<String, Object>) entity);
 		}

-		return MappedEntity.of(entity, context, this);
+		return MappedEntity.of(entity, context);
 	}

 	/**
@@ -145,18 +135,18 @@ class EntityOperations {
 	@SuppressWarnings({ "unchecked", "rawtypes" })
 	<T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {

-		Assert.notNull(entity, "Bean must not be null");
-		Assert.notNull(conversionService, "ConversionService must not be null");
+		Assert.notNull(entity, "Bean must not be null!");
+		Assert.notNull(conversionService, "ConversionService must not be null!");

 		if (entity instanceof String) {
-			return new UnmappedEntity(parse(entity.toString()), this);
+			return new UnmappedEntity(parse(entity.toString()));
 		}

 		if (entity instanceof Map) {
-			return new SimpleMappedEntity((Map<String, Object>) entity, this);
+			return new SimpleMappedEntity((Map<String, Object>) entity);
 		}

-		return AdaptibleMappedEntity.of(entity, context, conversionService, this);
+		return AdaptibleMappedEntity.of(entity, context, conversionService);
 	}

 	/**
@@ -181,7 +171,7 @@ class EntityOperations {

 		if (entityClass == null) {
 			throw new InvalidDataAccessApiUsageException(
-					"No class parameter provided, entity collection can't be determined");
+					"No class parameter provided, entity collection can't be determined!");
 		}

 		MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(entityClass);
@@ -218,7 +208,7 @@ class EntityOperations {
 	 */
 	public String getIdPropertyName(Class<?> type) {

-		Assert.notNull(type, "Type must not be null");
+		Assert.notNull(type, "Type must not be null!");

 		MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(type);

@@ -257,12 +247,12 @@ class EntityOperations {
 		try {
 			return Document.parse(source);
 		} catch (org.bson.json.JsonParseException o_O) {
-			throw new MappingException("Could not parse given String to save into a JSON document", o_O);
+			throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
 		} catch (RuntimeException o_O) {

 			// legacy 3.x exception
 			if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
-				throw new MappingException("Could not parse given String to save into a JSON document", o_O);
+				throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
 			}
 			throw o_O;
 		}
@@ -293,21 +283,16 @@ class EntityOperations {
 	 * @see EntityProjectionIntrospector#introspect(Class, Class)
 	 */
 	public <M, D> EntityProjection<M, D> introspectProjection(Class<M> resultType, Class<D> entityType) {

-		MongoPersistentEntity<?> persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType);
-		if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) {
-			return (EntityProjection) EntityProjection.nonProjecting(resultType);
-		}
 		return introspector.introspect(resultType, entityType);
 	}

 	/**
-	 * Convert {@link CollectionOptions} to {@link CreateCollectionOptions} using {@link Class entityType} to obtain
-	 * mapping metadata.
+	 * Convert given {@link CollectionOptions} to a document and take the domain type information into account when
+	 * creating a mapped schema for validation.
 	 *
-	 * @param collectionOptions
-	 * @param entityType
-	 * @return
+	 * @param collectionOptions can be {@literal null}.
+	 * @param entityType must not be {@literal null}. Use {@link Object} type instead.
+	 * @return the converted {@link CreateCollectionOptions}.
 	 * @since 3.4
 	 */
 	public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions,
@@ -356,9 +341,6 @@ class EntityOperations {
 			result.timeSeriesOptions(options);
 		});

-		collectionOptions.getChangeStreamOptions().ifPresent(it -> result
-				.changeStreamPreAndPostImagesOptions(new ChangeStreamPreAndPostImagesOptions(it.getPreAndPostImages())));
-
 		return result;
 	}

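The lines removed in the hunk above carry the change-stream pre- and post-image flag from `CollectionOptions` over to the driver's `CreateCollectionOptions`. A minimal sketch of the driver-level call, with the `enabled` flag as an assumed input:

```java
import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions;
import com.mongodb.client.model.CreateCollectionOptions;

class ChangeStreamOptionsSketch {

	// Equivalent of the removed mapping: enable pre- and post-images for the collection to create.
	CreateCollectionOptions withPreAndPostImages(boolean enabled) {

		CreateCollectionOptions options = new CreateCollectionOptions();
		options.changeStreamPreAndPostImagesOptions(new ChangeStreamPreAndPostImagesOptions(enabled));
		return options;
	}
}
```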
@@ -377,7 +359,6 @@ class EntityOperations {
 	 * A representation of information about an entity.
 	 *
 	 * @author Oliver Gierke
-	 * @author Christoph Strobl
 	 * @since 2.1
 	 */
 	interface Entity<T> {
@@ -396,16 +377,6 @@ class EntityOperations {
 		 */
 		Object getId();

-		/**
-		 * Returns the property value for {@code key}.
-		 *
-		 * @param key
-		 * @return
-		 * @since 4.1
-		 */
-		@Nullable
-		Object getPropertyValue(String key);
-
 		/**
 		 * Returns the {@link Query} to find the entity by its identifier.
 		 *
@@ -476,15 +447,6 @@ class EntityOperations {
 		 * @since 2.1.2
 		 */
 		boolean isNew();
-
-		/**
-		 * @param sortObject
-		 * @return
-		 * @since 4.1
-		 * @throws IllegalStateException if a sort key yields {@literal null}.
-		 */
-		Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType);
-
 	}

 	/**
@@ -506,7 +468,7 @@ class EntityOperations {
 		T populateIdIfNecessary(@Nullable Object id);

 		/**
-		 * Initializes the version property of the current entity if available.
+		 * Initializes the version property of the of the current entity if available.
 		 *
 		 * @return the entity with the version property updated if available.
 		 */
@@ -532,33 +494,42 @@ class EntityOperations {
 	private static class UnmappedEntity<T extends Map<String, Object>> implements AdaptibleEntity<T> {

 		private final T map;
-		private final EntityOperations entityOperations;

-		protected UnmappedEntity(T map, EntityOperations entityOperations) {
+		protected UnmappedEntity(T map) {
 			this.map = map;
-			this.entityOperations = entityOperations;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
+		 */
 		@Override
 		public String getIdFieldName() {
 			return ID_FIELD;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId()
+		 */
 		@Override
 		public Object getId() {
-			return getPropertyValue(ID_FIELD);
-		}
-
-		@Override
-		public Object getPropertyValue(String key) {
-			return map.get(key);
+			return map.get(ID_FIELD);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery()
+		 */
 		@Override
 		public Query getByIdQuery() {
 			return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD)));
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#populateIdIfNecessary(java.lang.Object)
+		 */
 		@Nullable
 		@Override
 		public T populateIdIfNecessary(@Nullable Object id) {
@@ -568,96 +539,90 @@ class EntityOperations {
 			return map;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion()
+		 */
 		@Override
 		public Query getQueryForVersion() {
-			throw new MappingException("Cannot query for version on plain Documents");
+			throw new MappingException("Cannot query for version on plain Documents!");
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
+		 */
 		@Override
 		public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {
-			return MappedDocument.of(map instanceof Document document //
-					? document //
+			return MappedDocument.of(map instanceof Document //
+					? (Document) map //
 					: new Document(map));
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#initializeVersionProperty()
+		 */
 		@Override
 		public T initializeVersionProperty() {
 			return map;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#getVersion()
+		 */
 		@Override
 		@Nullable
 		public Number getVersion() {
 			return null;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#incrementVersion()
+		 */
 		@Override
 		public T incrementVersion() {
 			return map;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean()
+		 */
 		@Override
 		public T getBean() {
 			return map;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
+		 */
 		@Override
 		public boolean isNew() {
 			return map.get(ID_FIELD) != null;
 		}

-		@Override
-		public Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType) {
-
-			Map<String, Object> keyset = new LinkedHashMap<>();
-			MongoPersistentEntity<?> sourceEntity = entityOperations.context.getPersistentEntity(sourceType);
-			if (sourceEntity != null && sourceEntity.hasIdProperty()) {
-				keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId());
-			} else {
-				keyset.put(ID_FIELD, getId());
-			}
-
-			for (String key : sortObject.keySet()) {
-
-				Object value = resolveValue(key, sourceEntity);
-
-				if (value == null) {
-					throw new IllegalStateException(
-							String.format("Cannot extract value for key %s because its value is null", key));
-				}
-
-				keyset.put(key, value);
-			}
-
-			return keyset;
-		}
-
-		@Nullable
-		private Object resolveValue(String key, @Nullable MongoPersistentEntity<?> sourceEntity) {
-
-			if (sourceEntity == null) {
-				return BsonUtils.resolveValue(map, key);
-			}
-			PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation());
-			PersistentPropertyPath<MongoPersistentProperty> persistentPropertyPath = entityOperations.context
-					.getPersistentPropertyPath(from);
-			return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(p -> p.getFieldName()));
-		}
 	}

 	private static class SimpleMappedEntity<T extends Map<String, Object>> extends UnmappedEntity<T> {

-		protected SimpleMappedEntity(T map, EntityOperations entityOperations) {
-			super(map, entityOperations);
+		protected SimpleMappedEntity(T map) {
+			super(map);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
+		 */
 		@Override
 		@SuppressWarnings("unchecked")
 		public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {

 			T bean = getBean();
-			bean = (T) (bean instanceof Document document//
-					? document //
+			bean = (T) (bean instanceof Document //
+					? (Document) bean //
 					: new Document(bean));
 			Document document = new Document();
 			writer.write(bean, document);
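The `toMappedDocument` branches above differ only in Java syntax: the newer side uses pattern matching for `instanceof` (Java 16+), the older side casts explicitly. A small, self-contained illustration of the equivalence; the `toDocument` helper is an assumption for demonstration only:

```java
import java.util.Map;

import org.bson.Document;

class InstanceofPatternSketch {

	static Document toDocument(Map<String, Object> map) {

		// newer branch style: the pattern variable "document" is bound by instanceof
		Document viaPattern = map instanceof Document document ? document : new Document(map);

		// older branch style: explicit cast, same behavior
		Document viaCast = map instanceof Document ? (Document) map : new Document(map);

		return viaPattern.equals(viaCast) ? viaPattern : viaCast; // always equal
	}
}
```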
@@ -671,48 +636,52 @@ class EntityOperations {
 		private final MongoPersistentEntity<?> entity;
 		private final IdentifierAccessor idAccessor;
 		private final PersistentPropertyAccessor<T> propertyAccessor;
-		private final EntityOperations entityOperations;

 		protected MappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor idAccessor,
-				PersistentPropertyAccessor<T> propertyAccessor, EntityOperations entityOperations) {
+				PersistentPropertyAccessor<T> propertyAccessor) {

 			this.entity = entity;
 			this.idAccessor = idAccessor;
 			this.propertyAccessor = propertyAccessor;
-			this.entityOperations = entityOperations;
 		}

 		private static <T> MappedEntity<T> of(T bean,
-				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
-				EntityOperations entityOperations) {
+				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {

 			MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
 			IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
 			PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);

-			return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations);
+			return new MappedEntity<>(entity, identifierAccessor, propertyAccessor);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
+		 */
 		@Override
 		public String getIdFieldName() {
 			return entity.getRequiredIdProperty().getFieldName();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId()
+		 */
 		@Override
 		public Object getId() {
 			return idAccessor.getRequiredIdentifier();
 		}

-		@Override
-		public Object getPropertyValue(String key) {
-			return propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key));
-		}
-
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery()
+		 */
 		@Override
 		public Query getByIdQuery() {

 			if (!entity.hasIdProperty()) {
-				throw new MappingException("No id property found for object of type " + entity.getType());
+				throw new MappingException("No id property found for object of type " + entity.getType() + "!");
 			}

 			MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
@@ -720,6 +689,10 @@ class EntityOperations {
 			return Query.query(Criteria.where(idProperty.getName()).is(getId()));
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion(java.lang.Object)
+		 */
 		@Override
 		public Query getQueryForVersion() {

@@ -730,6 +703,10 @@ class EntityOperations {
 					.and(versionProperty.getName()).is(getVersion()));
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter)
+		 */
 		@Override
 		public MappedDocument toMappedDocument(MongoWriter<? super T> writer) {

@@ -745,6 +722,10 @@ class EntityOperations {
 			return MappedDocument.of(document);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.Entity#assertUpdateableIdIfNotSet()
+		 */
 		public void assertUpdateableIdIfNotSet() {

 			if (!entity.hasIdProperty()) {
@@ -760,85 +741,47 @@ class EntityOperations {

 			if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) {
 				throw new InvalidDataAccessApiUsageException(
-						String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(),
+						String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(),
 								entity.getType().getName()));
 			}
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#isVersionedEntity()
+		 */
 		@Override
 		public boolean isVersionedEntity() {
 			return entity.hasVersionProperty();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getVersion()
+		 */
 		@Override
 		@Nullable
 		public Object getVersion() {
 			return propertyAccessor.getProperty(entity.getRequiredVersionProperty());
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean()
+		 */
 		@Override
 		public T getBean() {
 			return propertyAccessor.getBean();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
+		 */
 		@Override
 		public boolean isNew() {
 			return entity.isNew(propertyAccessor.getBean());
 		}

-		@Override
-		public Map<String, Object> extractKeys(Document sortObject, Class<?> sourceType) {
-
-			Map<String, Object> keyset = new LinkedHashMap<>();
-			MongoPersistentEntity<?> sourceEntity = entityOperations.context.getPersistentEntity(sourceType);
-			if (sourceEntity != null && sourceEntity.hasIdProperty()) {
-				keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId());
-			} else {
-				keyset.put(entity.getRequiredIdProperty().getName(), getId());
-			}
-
-			for (String key : sortObject.keySet()) {
-
-				Object value;
-				if (key.indexOf('.') != -1) {
-
-					// follow the path across nested levels.
-					// TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document.
-					value = getNestedPropertyValue(key);
-				} else {
-					value = getPropertyValue(key);
-				}
-
-				if (value == null) {
-					throw new IllegalStateException(
-							String.format("Cannot extract value for key %s because its value is null", key));
-				}
-
-				keyset.put(key, value);
-			}
-
-			return keyset;
-		}
-
-		@Nullable
-		private Object getNestedPropertyValue(String key) {
-
-			String[] segments = key.split("\\.");
-			Entity<?> currentEntity = this;
-			Object currentValue = BsonNull.VALUE;
-
-			for (int i = 0; i < segments.length; i++) {
-
-				String segment = segments[i];
-				currentValue = currentEntity.getPropertyValue(segment);
-
-				if (i < segments.length - 1) {
-					currentEntity = entityOperations.forEntity(currentValue);
-				}
-			}
-
-			return currentValue != null ? currentValue : BsonNull.VALUE;
-		}
 	}

 	private static class AdaptibleMappedEntity<T> extends MappedEntity<T> implements AdaptibleEntity<T> {
@@ -848,9 +791,9 @@ class EntityOperations {
 		private final IdentifierAccessor identifierAccessor;

 		private AdaptibleMappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor identifierAccessor,
-				ConvertingPropertyAccessor<T> propertyAccessor, EntityOperations entityOperations) {
+				ConvertingPropertyAccessor<T> propertyAccessor) {

-			super(entity, identifierAccessor, propertyAccessor, entityOperations);
+			super(entity, identifierAccessor, propertyAccessor);

 			this.entity = entity;
 			this.propertyAccessor = propertyAccessor;
@@ -859,16 +802,20 @@ class EntityOperations {

 		private static <T> AdaptibleEntity<T> of(T bean,
 				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
-				ConversionService conversionService, EntityOperations entityOperations) {
+				ConversionService conversionService) {

 			MongoPersistentEntity<?> entity = context.getRequiredPersistentEntity(bean.getClass());
 			IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean);
 			PersistentPropertyAccessor<T> propertyAccessor = entity.getPropertyAccessor(bean);

 			return new AdaptibleMappedEntity<>(entity, identifierAccessor,
-					new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations);
+					new ConvertingPropertyAccessor<>(propertyAccessor, conversionService));
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#populateIdIfNecessary(java.lang.Object)
+		 */
 		@Nullable
 		@Override
 		public T populateIdIfNecessary(@Nullable Object id) {
@@ -890,6 +837,10 @@ class EntityOperations {
 			return propertyAccessor.getBean();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.MappedEntity#getVersion()
+		 */
 		@Override
 		@Nullable
 		public Number getVersion() {
@@ -899,6 +850,10 @@ class EntityOperations {
 			return propertyAccessor.getProperty(versionProperty, Number.class);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#initializeVersionProperty()
+		 */
 		@Override
 		public T initializeVersionProperty() {

@@ -913,6 +868,10 @@ class EntityOperations {
 			return propertyAccessor.getBean();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#incrementVersion()
+		 */
 		@Override
 		public T incrementVersion() {

@@ -967,14 +926,6 @@ class EntityOperations {
 		 * @since 3.3
 		 */
 		TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
-
-		/**
-		 * @return the name of the id field.
-		 * @since 4.1
-		 */
-		default String getIdKeyName() {
-			return ID_FIELD;
-		}
 	}

 	/**
@@ -992,11 +943,19 @@ class EntityOperations {
 			return (TypedOperations) INSTANCE;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
+		 */
 		@Override
 		public Optional<Collation> getCollation() {
 			return Optional.empty();
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
+		 */
 		@Override
 		public Optional<Collation> getCollation(Query query) {

@@ -1031,11 +990,19 @@ class EntityOperations {
 			this.entity = entity;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
+		 */
 		@Override
 		public Optional<Collation> getCollation() {
 			return Optional.ofNullable(entity.getCollation());
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
+		 */
 		@Override
 		public Optional<Collation> getCollation(Query query) {

@@ -1097,11 +1064,6 @@ class EntityOperations {
 			MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
 			return persistentProperty != null ? persistentProperty.getFieldName() : name;
 		}

-		@Override
-		public String getIdKeyName() {
-			return entity.getIdProperty().getName();
-		}
 	}

 }
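The `extractKeys`/`getNestedPropertyValue` helpers removed from `EntityOperations` above resolve dotted sort keys such as `address.city` when building a keyset from a document. A hedged, stripped-down sketch of that traversal over plain `Document` values; the `resolve` helper and the sample data are assumptions, not repository code:

```java
import org.bson.Document;

class DotPathSketch {

	// Walk a dotted path segment by segment across nested Documents; null if the path breaks off.
	static Object resolve(Document source, String dotPath) {

		Object current = source;
		for (String segment : dotPath.split("\\.")) {

			if (!(current instanceof Document document)) {
				return null;
			}
			current = document.get(segment);
		}
		return current;
	}

	public static void main(String[] args) {

		Document person = new Document("address", new Document("city", "Dresden"));
		System.out.println(resolve(person, "address.city")); // prints "Dresden"
	}
}
```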
@@ -15,10 +15,9 @@
  */
 package org.springframework.data.mongodb.core;

-import java.util.stream.Stream;
-
 import org.springframework.data.mongodb.core.aggregation.Aggregation;
 import org.springframework.data.mongodb.core.aggregation.AggregationResults;
+import org.springframework.data.util.CloseableIterator;

 /**
  * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent
@@ -89,12 +88,12 @@ public interface ExecutableAggregationOperation {

 		/**
 		 * Apply pipeline operations as specified and stream all matching elements. <br />
-		 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable}
+		 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable}
 		 *
-		 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-		 *         through a try-with-resources clause).
+		 * @return a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed.
+		 *         Never {@literal null}.
 		 */
-		Stream<T> stream();
+		CloseableIterator<T> stream();
 	}

 	/**
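On the newer side of this compare, `TerminatingAggregation#stream()` returns a `Stream` that, per the Javadoc above, has to be closed once fully processed, e.g. via try-with-resources; the older side returns a `CloseableIterator` instead. A hedged usage sketch of the `Stream` variant; the `Jedi` domain type, the criteria and the field name are assumptions:

```java
import java.util.stream.Stream;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregationStreamSketch {

	record Jedi(String name) {}

	void streamMatching(MongoOperations operations) {

		Aggregation aggregation = Aggregation.newAggregation( //
				Aggregation.match(Criteria.where("useTheForce").is(true)));

		// close the Stream once fully processed, as the updated Javadoc recommends
		try (Stream<Jedi> stream = operations.aggregateAndReturn(Jedi.class).by(aggregation).stream()) {
			stream.forEach(jedi -> System.out.println(jedi.name()));
		}
	}
}
```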
@@ -15,11 +15,10 @@
  */
 package org.springframework.data.mongodb.core;

-import java.util.stream.Stream;
-
 import org.springframework.data.mongodb.core.aggregation.Aggregation;
 import org.springframework.data.mongodb.core.aggregation.AggregationResults;
 import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
+import org.springframework.data.util.CloseableIterator;
 import org.springframework.util.Assert;
 import org.springframework.util.StringUtils;

@@ -38,10 +37,14 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
 		this.template = template;
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class)
+	 */
 	@Override
 	public <T> ExecutableAggregation<T> aggregateAndReturn(Class<T> domainType) {

-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");

 		return new ExecutableAggregationSupport<>(template, domainType, null, null);
 	}
@@ -66,29 +69,45 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
 			this.collection = collection;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithCollection#inCollection(java.lang.String)
+		 */
 		@Override
 		public AggregationWithAggregation<T> inCollection(String collection) {

-			Assert.hasText(collection, "Collection must not be null nor empty");
+			Assert.hasText(collection, "Collection must not be null nor empty!");

 			return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithAggregation#by(org.springframework.data.mongodb.core.aggregation.Aggregation)
+		 */
 		@Override
 		public TerminatingAggregation<T> by(Aggregation aggregation) {

-			Assert.notNull(aggregation, "Aggregation must not be null");
+			Assert.notNull(aggregation, "Aggregation must not be null!");

 			return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#all()
+		 */
 		@Override
 		public AggregationResults<T> all() {
 			return template.aggregate(aggregation, getCollectionName(aggregation), domainType);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#stream()
+		 */
 		@Override
-		public Stream<T> stream() {
+		public CloseableIterator<T> stream() {
 			return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType);
 		}

@@ -98,7 +117,9 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
 				return collection;
 			}

-			if (aggregation instanceof TypedAggregation typedAggregation) {
+			if (aggregation instanceof TypedAggregation) {

+				TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
+
 				if (typedAggregation.getInputType() != null) {
 					return template.getCollectionName(typedAggregation.getInputType());
@@ -20,9 +20,6 @@ import java.util.Optional;
 import java.util.stream.Stream;

 import org.springframework.dao.DataAccessException;
-import org.springframework.data.domain.KeysetScrollPosition;
-import org.springframework.data.domain.ScrollPosition;
-import org.springframework.data.domain.Window;
 import org.springframework.data.geo.GeoResults;
 import org.springframework.data.mongodb.core.query.CriteriaDefinition;
 import org.springframework.data.mongodb.core.query.NearQuery;
@@ -121,34 +118,18 @@ public interface ExecutableFindOperation {
 		/**
 		 * Stream all matching elements.
 		 *
-		 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-		 *         through a try-with-resources clause).
+		 * @return a {@link Stream} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. Never
+		 *         {@literal null}.
 		 */
 		Stream<T> stream();

 		/**
-		 * Return a window of elements either starting or resuming at
-		 * {@link org.springframework.data.domain.ScrollPosition}.
-		 * <p>
-		 * When using {@link KeysetScrollPosition}, make sure to use non-nullable
-		 * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct
-		 * a query result from absent document fields or {@code null} values through {@code $gt/$lt} operators.
-		 *
-		 * @param scrollPosition the scroll position.
-		 * @return a window of the resulting elements.
-		 * @since 4.1
-		 * @see org.springframework.data.domain.OffsetScrollPosition
-		 * @see org.springframework.data.domain.KeysetScrollPosition
-		 */
-		Window<T> scroll(ScrollPosition scrollPosition);
-
-		/**
-		 * Get the number of matching elements. <br />
-		 * This method uses an
-		 * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
-		 * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
-		 * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications
-		 * needs use {@link MongoOperations#estimatedCount(String)} for empty queries instead.
+		 * Get the number of matching elements.
+		 * <br />
+		 * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
+		 * execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard,
+		 * session and transaction compliance. In case an inaccurate count satisfies the applications needs use
+		 * {@link MongoOperations#estimatedCount(String)} for empty queries instead.
 		 *
 		 * @return total number of matching elements.
 		 */
|
|||||||
|
|
||||||
import org.bson.Document;
|
import org.bson.Document;
|
||||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||||
import org.springframework.data.domain.Window;
|
|
||||||
import org.springframework.data.domain.ScrollPosition;
|
|
||||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||||
import org.springframework.data.mongodb.core.query.Query;
|
import org.springframework.data.mongodb.core.query.Query;
|
||||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||||
|
import org.springframework.data.util.CloseableIterator;
|
||||||
|
import org.springframework.data.util.StreamUtils;
|
||||||
import org.springframework.lang.Nullable;
|
import org.springframework.lang.Nullable;
|
||||||
import org.springframework.util.Assert;
|
import org.springframework.util.Assert;
|
||||||
import org.springframework.util.ObjectUtils;
|
import org.springframework.util.ObjectUtils;
|
||||||
@@ -51,10 +51,14 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
this.template = template;
|
this.template = template;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public <T> ExecutableFind<T> query(Class<T> domainType) {
|
public <T> ExecutableFind<T> query(Class<T> domainType) {
|
||||||
|
|
||||||
Assert.notNull(domainType, "DomainType must not be null");
|
Assert.notNull(domainType, "DomainType must not be null!");
|
||||||
|
|
||||||
return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY);
|
return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY);
|
||||||
}
|
}
|
||||||
@@ -70,11 +74,11 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
private final MongoTemplate template;
|
private final MongoTemplate template;
|
||||||
private final Class<?> domainType;
|
private final Class<?> domainType;
|
||||||
private final Class<T> returnType;
|
private final Class<T> returnType;
|
||||||
private final @Nullable String collection;
|
@Nullable private final String collection;
|
||||||
private final Query query;
|
private final Query query;
|
||||||
|
|
||||||
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType, @Nullable String collection,
|
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||||
Query query) {
|
String collection, Query query) {
|
||||||
this.template = template;
|
this.template = template;
|
||||||
this.domainType = domainType;
|
this.domainType = domainType;
|
||||||
this.returnType = returnType;
|
this.returnType = returnType;
|
||||||
@@ -82,30 +86,46 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
this.query = query;
|
this.query = query;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithCollection#inCollection(java.lang.String)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public FindWithProjection<T> inCollection(String collection) {
|
public FindWithProjection<T> inCollection(String collection) {
|
||||||
|
|
||||||
Assert.hasText(collection, "Collection name must not be null nor empty");
|
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||||
|
|
||||||
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection#as(Class)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public <T1> FindWithQuery<T1> as(Class<T1> returnType) {
|
public <T1> FindWithQuery<T1> as(Class<T1> returnType) {
|
||||||
|
|
||||||
Assert.notNull(returnType, "ReturnType must not be null");
|
Assert.notNull(returnType, "ReturnType must not be null!");
|
||||||
|
|
||||||
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public TerminatingFind<T> matching(Query query) {
|
public TerminatingFind<T> matching(Query query) {
|
||||||
|
|
||||||
Assert.notNull(query, "Query must not be null");
|
Assert.notNull(query, "Query must not be null!");
|
||||||
|
|
||||||
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
return new ExecutableFindSupport<>(template, domainType, returnType, collection, query);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#oneValue()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public T oneValue() {
|
public T oneValue() {
|
||||||
|
|
||||||
@@ -116,12 +136,16 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (result.size() > 1) {
|
if (result.size() > 1) {
|
||||||
throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1);
|
throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
return result.iterator().next();
|
return result.iterator().next();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#firstValue()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public T firstValue() {
|
public T firstValue() {
|
||||||
|
|
||||||
@@ -130,41 +154,60 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
return ObjectUtils.isEmpty(result) ? null : result.iterator().next();
|
return ObjectUtils.isEmpty(result) ? null : result.iterator().next();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#all()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public List<T> all() {
|
public List<T> all() {
|
||||||
return doFind(null);
|
return doFind(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#stream()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public Stream<T> stream() {
|
public Stream<T> stream() {
|
||||||
return doStream();
|
return StreamUtils.createStreamFromIterator(doStream());
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Window<T> scroll(ScrollPosition scrollPosition) {
|
|
||||||
return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public TerminatingFindNear<T> near(NearQuery nearQuery) {
|
public TerminatingFindNear<T> near(NearQuery nearQuery) {
|
||||||
return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType);
|
return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#count()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public long count() {
|
public long count() {
|
||||||
return template.count(query, domainType, getCollectionName());
|
return template.count(query, domainType, getCollectionName());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#exists()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public boolean exists() {
|
public boolean exists() {
|
||||||
return template.exists(query, domainType, getCollectionName());
|
return template.exists(query, domainType, getCollectionName());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindDistinct#distinct(java.lang.String)
|
||||||
|
*/
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
@Override
|
@Override
|
||||||
public TerminatingDistinct<Object> distinct(String field) {
|
public TerminatingDistinct<Object> distinct(String field) {
|
||||||
|
|
||||||
Assert.notNull(field, "Field must not be null");
|
Assert.notNull(field, "Field must not be null!");
|
||||||
|
|
||||||
return new DistinctOperationSupport(this, field);
|
return new DistinctOperationSupport(this, field);
|
||||||
}
|
}
|
||||||
@@ -174,8 +217,8 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
Document queryObject = query.getQueryObject();
|
Document queryObject = query.getQueryObject();
|
||||||
Document fieldsObject = query.getFieldsObject();
|
Document fieldsObject = query.getFieldsObject();
|
||||||
|
|
||||||
return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType,
|
return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType,
|
||||||
returnType, getCursorPreparer(query, preparer));
|
getCursorPreparer(query, preparer));
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<T> doFindDistinct(String field) {
|
private List<T> doFindDistinct(String field) {
|
||||||
@@ -184,7 +227,7 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
returnType == domainType ? (Class<T>) Object.class : returnType);
|
returnType == domainType ? (Class<T>) Object.class : returnType);
|
||||||
}
|
}
|
||||||
|
|
||||||
private Stream<T> doStream() {
|
private CloseableIterator<T> doStream() {
|
||||||
return template.doStream(query, domainType, getCollectionName(), returnType);
|
return template.doStream(query, domainType, getCollectionName(), returnType);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -214,6 +257,10 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
this.delegate = delegate;
|
this.delegate = delegate;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.clientFindIterable)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public FindIterable<Document> prepare(FindIterable<Document> iterable) {
|
public FindIterable<Document> prepare(FindIterable<Document> iterable) {
|
||||||
|
|
||||||
@@ -248,23 +295,35 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
|||||||
this.field = field;
|
this.field = field;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithProjection#as(java.lang.Class)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
@SuppressWarnings("unchecked")
|
@SuppressWarnings("unchecked")
|
||||||
public <R> TerminatingDistinct<R> as(Class<R> resultType) {
|
public <R> TerminatingDistinct<R> as(Class<R> resultType) {
|
||||||
|
|
||||||
Assert.notNull(resultType, "ResultType must not be null");
|
Assert.notNull(resultType, "ResultType must not be null!");
|
||||||
|
|
||||||
return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field);
|
return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public TerminatingDistinct<T> matching(Query query) {
|
public TerminatingDistinct<T> matching(Query query) {
|
||||||
|
|
||||||
Assert.notNull(query, "Query must not be null");
|
Assert.notNull(query, "Query must not be null!");
|
||||||
|
|
||||||
return new DistinctOperationSupport<>((ExecutableFindSupport<T>) delegate.matching(query), field);
|
return new DistinctOperationSupport<>((ExecutableFindSupport<T>) delegate.matching(query), field);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* (non-Javadoc)
|
||||||
|
* @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingDistinct#all()
|
||||||
|
*/
|
||||||
@Override
|
@Override
|
||||||
public List<T> all() {
|
public List<T> all() {
|
||||||
return delegate.doFindDistinct(field);
|
return delegate.doFindDistinct(field);
|
||||||
|
|||||||
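On both sides of this compare the Stream returned by TerminatingFind#stream() is backed by a MongoDB cursor, so it should be closed after consumption; a hedged usage sketch (the Person type and criteria are placeholders, not part of the diff).

import java.util.stream.Stream;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class StreamUsageSketch {

	void streamPeople(MongoTemplate template) {

		Query query = Query.query(Criteria.where("lastname").is("Matthews"));

		// try-with-resources closes the stream and releases the underlying cursor.
		try (Stream<Person> stream = template.query(Person.class).matching(query).stream()) {
			stream.forEach(person -> {
				// process each element while the cursor is open
			});
		}
	}

	// Placeholder document type.
	record Person(String id, String lastname) {}
}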
@@ -40,10 +40,14 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
 		this.template = template;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.coreExecutableInsertOperation#insert(java.lan.Class)
+	 */
 	@Override
 	public <T> ExecutableInsert<T> insert(Class<T> domainType) {
 
-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");
 
 		return new ExecutableInsertSupport<>(template, domainType, null, null);
 	}

@@ -67,43 +71,63 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
 			this.bulkMode = bulkMode;
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#insert(java.lang.Class)
+		 */
 		@Override
 		public T one(T object) {
 
-			Assert.notNull(object, "Object must not be null");
+			Assert.notNull(object, "Object must not be null!");
 
 			return template.insert(object, getCollectionName());
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#all(java.util.Collection)
+		 */
 		@Override
 		public Collection<T> all(Collection<? extends T> objects) {
 
-			Assert.notNull(objects, "Objects must not be null");
+			Assert.notNull(objects, "Objects must not be null!");
 
 			return template.insert(objects, getCollectionName());
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingBulkInsert#bulk(java.util.Collection)
+		 */
 		@Override
 		public BulkWriteResult bulk(Collection<? extends T> objects) {
 
-			Assert.notNull(objects, "Objects must not be null");
+			Assert.notNull(objects, "Objects must not be null!");
 
 			return template.bulkOps(bulkMode != null ? bulkMode : BulkMode.ORDERED, domainType, getCollectionName())
 					.insert(new ArrayList<>(objects)).execute();
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithCollection#inCollection(java.lang.String)
+		 */
 		@Override
 		public InsertWithBulkMode<T> inCollection(String collection) {
 
-			Assert.hasText(collection, "Collection must not be null nor empty");
+			Assert.hasText(collection, "Collection must not be null nor empty.");
 
 			return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithBulkMode#withBulkMode(org.springframework.data.mongodb.core.BulkMode)
+		 */
 		@Override
 		public TerminatingBulkInsert<T> withBulkMode(BulkMode bulkMode) {
 
-			Assert.notNull(bulkMode, "BulkMode must not be null");
+			Assert.notNull(bulkMode, "BulkMode must not be null!");
 
 			return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode);
 		}
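A hedged sketch of the fluent insert API implemented in the hunks above (the entity type, collection name and data are placeholders, not part of the diff).

import java.util.List;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.MongoTemplate;

import com.mongodb.bulk.BulkWriteResult;

class InsertUsageSketch {

	void insertPeople(MongoTemplate template, List<Person> people) {

		// Single insert through the fluent ExecutableInsertOperation API.
		template.insert(Person.class).inCollection("people").one(people.get(0));

		// Bulk insert with an explicit bulk mode.
		BulkWriteResult result = template.insert(Person.class)
				.inCollection("people")
				.withBulkMode(BulkMode.UNORDERED)
				.bulk(people);
	}

	// Placeholder document type.
	record Person(String id, String lastname) {}
}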
@@ -187,9 +187,7 @@ public interface ExecutableMapReduceOperation {
 	 *
 	 * @author Christoph Strobl
 	 * @since 2.1
-	 * @deprecated since 4.0 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
 	 */
-	@Deprecated
 	interface MapReduceWithOptions<T> {
 
 		/**
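The removed @deprecated note points at the aggregation framework as the replacement for map-reduce; a hedged sketch of a typical aggregation equivalent (collection name, fields and result type are placeholders, not part of the diff).

import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;

class AggregationInsteadOfMapReduceSketch {

	void sumPerCustomer(MongoTemplate template) {

		// Group orders by customer and sum an 'amount' field, a common map-reduce use case.
		Aggregation aggregation = newAggregation(group("customerId").sum("amount").as("total"));

		AggregationResults<OrderTotal> results = template.aggregate(aggregation, "orders", OrderTotal.class);
		results.forEach(total -> {
			// process each aggregated document
		});
	}

	// Placeholder result type; the grouped _id maps to the customer identifier.
	record OrderTotal(String id, double total) {}
}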
@@ -37,7 +37,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 
 	ExecutableMapReduceOperationSupport(MongoTemplate template) {
 
-		Assert.notNull(template, "Template must not be null");
+		Assert.notNull(template, "Template must not be null!");
 		this.template = template;
 	}
 

@@ -48,7 +48,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 	@Override
 	public <T> ExecutableMapReduceSupport<T> mapReduce(Class<T> domainType) {
 
-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");
 
 		return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null);
 	}

@@ -101,7 +101,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public MapReduceWithProjection<T> inCollection(String collection) {
 
-			Assert.hasText(collection, "Collection name must not be null nor empty");
+			Assert.hasText(collection, "Collection name must not be null nor empty!");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
 					reduceFunction, options);

@@ -114,7 +114,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public TerminatingMapReduce<T> matching(Query query) {
 
-			Assert.notNull(query, "Query must not be null");
+			Assert.notNull(query, "Query must not be null!");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
 					reduceFunction, options);

@@ -127,7 +127,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public <R> MapReduceWithQuery<R> as(Class<R> resultType) {
 
-			Assert.notNull(resultType, "ResultType must not be null");
+			Assert.notNull(resultType, "ResultType must not be null!");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction,
 					reduceFunction, options);

@@ -140,7 +140,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public ExecutableMapReduce<T> with(MapReduceOptions options) {
 
-			Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead");
+			Assert.notNull(options, "Options must not be null! Please consider empty MapReduceOptions#options() instead.");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
 					reduceFunction, options);

@@ -153,7 +153,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public MapReduceWithReduceFunction<T> map(String mapFunction) {
 
-			Assert.hasText(mapFunction, "MapFunction name must not be null nor empty");
+			Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
 					reduceFunction, options);

@@ -166,7 +166,7 @@ class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperatio
 		@Override
 		public ExecutableMapReduce<T> reduce(String reduceFunction) {
 
-			Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty");
+			Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!");
 
 			return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
 					reduceFunction, options);
@@ -76,7 +76,7 @@ public interface ExecutableRemoveOperation {
 
 	/**
 	 * Remove and return all matching documents. <br/>
-	 * <strong>NOTE:</strong> The entire list of documents will be fetched before sending the actual delete commands.
+	 * <strong>NOTE</strong> The entire list of documents will be fetched before sending the actual delete commands.
 	 * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete
 	 * operation.
 	 *
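A hedged sketch of the remove API this Javadoc documents; per the note above, findAndRemove() fetches the matching documents before deleting them, so all() is preferable when the removed documents are not needed (entity and criteria are placeholders, not part of the diff).

import java.util.List;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.client.result.DeleteResult;

class RemoveUsageSketch {

	void removeInactivePeople(MongoTemplate template) {

		Query query = Query.query(Criteria.where("active").is(false));

		// Delete without materializing the documents.
		DeleteResult result = template.remove(Person.class).matching(query).all();

		// Fetch-then-delete variant that returns the removed documents and publishes delete events per document.
		List<Person> removed = template.remove(Person.class).matching(query).findAndRemove();
	}

	// Placeholder document type.
	record Person(String id, boolean active) {}
}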
@@ -41,10 +41,14 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
 		this.tempate = tempate;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class)
+	 */
 	@Override
 	public <T> ExecutableRemove<T> remove(Class<T> domainType) {
 
-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");
 
 		return new ExecutableRemoveSupport<>(tempate, domainType, ALL_QUERY, null);
 	}

@@ -67,32 +71,52 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
 			this.collection = collection;
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithCollection#inCollection(java.lang.String)
+		 */
 		@Override
 		public RemoveWithQuery<T> inCollection(String collection) {
 
-			Assert.hasText(collection, "Collection must not be null nor empty");
+			Assert.hasText(collection, "Collection must not be null nor empty!");
 
 			return new ExecutableRemoveSupport<>(template, domainType, query, collection);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
+		 */
 		@Override
 		public TerminatingRemove<T> matching(Query query) {
 
-			Assert.notNull(query, "Query must not be null");
+			Assert.notNull(query, "Query must not be null!");
 
 			return new ExecutableRemoveSupport<>(template, domainType, query, collection);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#all()
+		 */
 		@Override
 		public DeleteResult all() {
 			return template.doRemove(getCollectionName(), query, domainType, true);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#one()
+		 */
 		@Override
 		public DeleteResult one() {
 			return template.doRemove(getCollectionName(), query, domainType, false);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#findAndRemove()
+		 */
 		@Override
 		public List<T> findAndRemove() {
 
@@ -40,10 +40,14 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
 		this.template = template;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class)
+	 */
 	@Override
 	public <T> ExecutableUpdate<T> update(Class<T> domainType) {
 
-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");
 
 		return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType);
 	}

@@ -81,84 +85,128 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
 			this.targetType = targetType;
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.UpdateDefinition)
+		 */
 		@Override
 		public TerminatingUpdate<T> apply(UpdateDefinition update) {
 
-			Assert.notNull(update, "Update must not be null");
+			Assert.notNull(update, "Update must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					findAndReplaceOptions, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithCollection#inCollection(java.lang.String)
+		 */
 		@Override
 		public UpdateWithQuery<T> inCollection(String collection) {
 
-			Assert.hasText(collection, "Collection must not be null nor empty");
+			Assert.hasText(collection, "Collection must not be null nor empty!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					findAndReplaceOptions, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions)
+		 */
 		@Override
 		public TerminatingFindAndModify<T> withOptions(FindAndModifyOptions options) {
 
-			Assert.notNull(options, "Options must not be null");
+			Assert.notNull(options, "Options must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options,
 					findAndReplaceOptions, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#replaceWith(Object)
+		 */
 		@Override
 		public FindAndReplaceWithProjection<T> replaceWith(T replacement) {
 
-			Assert.notNull(replacement, "Replacement must not be null");
+			Assert.notNull(replacement, "Replacement must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					findAndReplaceOptions, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions)
+		 */
 		@Override
 		public FindAndReplaceWithProjection<T> withOptions(FindAndReplaceOptions options) {
 
-			Assert.notNull(options, "Options must not be null");
+			Assert.notNull(options, "Options must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					options, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.query.Query)
+		 */
 		@Override
 		public UpdateWithUpdate<T> matching(Query query) {
 
-			Assert.notNull(query, "Query must not be null");
+			Assert.notNull(query, "Query must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					findAndReplaceOptions, replacement, targetType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class)
+		 */
 		@Override
 		public <R> FindAndReplaceWithOptions<R> as(Class<R> resultType) {
 
-			Assert.notNull(resultType, "ResultType must not be null");
+			Assert.notNull(resultType, "ResultType must not be null!");
 
 			return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions,
 					findAndReplaceOptions, replacement, resultType);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#all()
+		 */
 		@Override
 		public UpdateResult all() {
 			return doUpdate(true, false);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#first()
+		 */
 		@Override
 		public UpdateResult first() {
 			return doUpdate(false, false);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#upsert()
+		 */
 		@Override
 		public UpdateResult upsert() {
 			return doUpdate(true, true);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndModify#findAndModifyValue()
+		 */
 		@Override
 		public @Nullable T findAndModifyValue() {
 

@@ -167,6 +215,10 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
 					getCollectionName());
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndReplace#findAndReplaceValue()
+		 */
 		@Override
 		public @Nullable T findAndReplaceValue() {
 
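A hedged sketch of the fluent update and find-and-replace API implemented above (entity, criteria and replacement values are placeholders, not part of the diff).

import org.springframework.data.mongodb.core.FindAndReplaceOptions;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.client.result.UpdateResult;

class UpdateUsageSketch {

	void updateAndReplace(MongoTemplate template) {

		Query query = Query.query(Criteria.where("lastname").is("Matthews"));

		// Plain update of all matching documents.
		UpdateResult result = template.update(Person.class)
				.matching(query)
				.apply(new Update().set("active", true))
				.all();

		// Find-and-replace returning the new document state.
		Person replacement = new Person("42", "Matthews", true);
		Person replaced = template.update(Person.class)
				.matching(query)
				.replaceWith(replacement)
				.withOptions(FindAndReplaceOptions.options().returnNew())
				.findAndReplaceValue();
	}

	// Placeholder document type.
	record Person(String id, String lastname, boolean active) {}
}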
@@ -35,7 +35,7 @@ public class FindAndModifyOptions {
 
 	private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
 
-		private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed; Please use FindAndModifyOptions.options() instead";
+		private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
 
 		@Override
 		public FindAndModifyOptions returnNew(boolean returnNew) {
@@ -38,7 +38,7 @@ public class FindAndReplaceOptions {
 
 	private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
 
-		private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed; Please use FindAndReplaceOptions.options() instead";
+		private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
 
 		@Override
 		public FindAndReplaceOptions returnNew() {
@@ -61,8 +61,8 @@ public interface FindPublisherPreparer extends ReadPreferenceAware {
 	default FindPublisher<Document> initiateFind(MongoCollection<Document> collection,
 			Function<MongoCollection<Document>, FindPublisher<Document>> find) {
 
-		Assert.notNull(collection, "Collection must not be null");
-		Assert.notNull(find, "Find function must not be null");
+		Assert.notNull(collection, "Collection must not be null!");
+		Assert.notNull(find, "Find function must not be null!");
 
 		if (hasReadPreference()) {
 			collection = collection.withReadPreference(getReadPreference());
@@ -39,7 +39,7 @@ class GeoCommandStatistics {
 	 */
 	private GeoCommandStatistics(Document source) {
 
-		Assert.notNull(source, "Source document must not be null");
+		Assert.notNull(source, "Source document must not be null!");
 		this.source = source;
 	}
 

@@ -51,7 +51,7 @@ class GeoCommandStatistics {
 	 */
 	public static GeoCommandStatistics from(Document commandResult) {
 
-		Assert.notNull(commandResult, "Command result must not be null");
+		Assert.notNull(commandResult, "Command result must not be null!");
 
 		Object stats = commandResult.get("stats");
 		return stats == null ? NONE : new GeoCommandStatistics((Document) stats);
@@ -1,129 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import java.util.function.Function;
-
-import org.bson.conversions.Bson;
-import org.springframework.data.mongodb.CodecRegistryProvider;
-import org.springframework.data.mongodb.util.BsonUtils;
-import org.springframework.lang.Nullable;
-import org.springframework.util.StringUtils;
-
-/**
- * Function object to apply a query hint. Can be an index name or a BSON document.
- *
- * @author Mark Paluch
- * @author Christoph Strobl
- * @since 4.1
- */
-class HintFunction {
-
-	private static final HintFunction EMPTY = new HintFunction(null);
-
-	private final @Nullable Object hint;
-
-	private HintFunction(@Nullable Object hint) {
-		this.hint = hint;
-	}
-
-	/**
-	 * Return an empty hint function.
-	 *
-	 * @return
-	 */
-	static HintFunction empty() {
-		return EMPTY;
-	}
-
-	/**
-	 * Create a {@link HintFunction} from a {@link Bson document} or {@link String index name}.
-	 *
-	 * @param hint
-	 * @return
-	 */
-	static HintFunction from(@Nullable Object hint) {
-		return new HintFunction(hint);
-	}
-
-	/**
-	 * Return whether a hint is present.
-	 *
-	 * @return
-	 */
-	public boolean isPresent() {
-		return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson;
-	}
-
-	/**
-	 * If a hint is not present, returns {@code true}, otherwise {@code false}.
-	 *
-	 * @return {@code true} if a hint is not present, otherwise {@code false}.
-	 */
-	public boolean isEmpty() {
-		return !isPresent();
-	}
-
-	/**
-	 * Apply the hint to consumers depending on the hint format if {@link #isPresent() present}.
-	 *
-	 * @param registryProvider
-	 * @param stringConsumer
-	 * @param bsonConsumer
-	 * @param <R>
-	 */
-	public <R> void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function<String, R> stringConsumer,
-			Function<Bson, R> bsonConsumer) {
-
-		if (isEmpty()) {
-			return;
-		}
-		apply(registryProvider, stringConsumer, bsonConsumer);
-	}
-
-	/**
-	 * Apply the hint to consumers depending on the hint format.
-	 *
-	 * @param registryProvider
-	 * @param stringConsumer
-	 * @param bsonConsumer
-	 * @return
-	 * @param <R>
-	 */
-	public <R> R apply(@Nullable CodecRegistryProvider registryProvider, Function<String, R> stringConsumer,
-			Function<Bson, R> bsonConsumer) {
-
-		if (isEmpty()) {
-			throw new IllegalStateException("No hint present");
-		}
-
-		if (hint instanceof Bson bson) {
-			return bsonConsumer.apply(bson);
-		}
-
-		if (hint instanceof String hintString) {
-
-			if (BsonUtils.isJsonDocument(hintString)) {
-				return bsonConsumer.apply(BsonUtils.parse(hintString, registryProvider));
-			}
-			return stringConsumer.apply(hintString);
-		}
-
-		throw new IllegalStateException(
-				"Unable to read hint of type %s".formatted(hint != null ? hint.getClass() : "null"));
-	}
-
-}
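The deleted helper above routes a hint supplied on a Query either as an index name or as a BSON document; a hedged sketch of the user-facing side (index name, criteria and entity are placeholders, not part of the diff).

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class HintUsageSketch {

	void findWithHint(MongoTemplate template) {

		// Index-name hint; the removed HintFunction also handles Bson documents and JSON strings internally.
		Query query = Query.query(Criteria.where("lastname").is("Matthews")).withHint("lastname_idx");

		template.find(query, Person.class);
	}

	// Placeholder document type.
	record Person(String id, String lastname) {}
}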
@@ -119,10 +119,6 @@ abstract class IndexConverters {
 				ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
 			}
 
-			if (indexOptions.containsKey("hidden")) {
-				ops = ops.hidden((Boolean) indexOptions.get("hidden"));
-			}
-
 			return ops;
 		};
 	}
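For context on the removed "hidden" option mapping, a hedged sketch using the plain MongoDB Java driver to create a hidden index (collection and field names are placeholders, not part of the diff).

import org.bson.Document;

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;

class HiddenIndexSketch {

	void createHiddenIndex(MongoCollection<Document> collection) {

		// A hidden index is maintained by the server but ignored by the query planner (MongoDB 4.4+).
		collection.createIndex(Indexes.ascending("lastname"), new IndexOptions().hidden(true));
	}
}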
@@ -122,31 +122,55 @@ public class MappedDocument {
 			this.delegate = delegate;
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getUpdateObject()
+		 */
 		@Override
 		public Document getUpdateObject() {
 			return delegate.getUpdateObject();
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#modifies(java.lang.String)
+		 */
 		@Override
 		public boolean modifies(String key) {
 			return delegate.modifies(key);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#inc(java.lang.String)
+		 */
 		@Override
 		public void inc(String version) {
 			delegate.inc(version);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#isIsolated()
+		 */
 		@Override
 		public Boolean isIsolated() {
 			return delegate.isIsolated();
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
+		 */
 		@Override
 		public List<ArrayFilter> getArrayFilters() {
 			return delegate.getArrayFilters();
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
+		 */
 		@Override
 		public boolean hasArrayFilters() {
 			return delegate.hasArrayFilters();
@@ -40,7 +40,7 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
 import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
 import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
 import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
-import org.springframework.data.util.TypeInformation;
+import org.springframework.data.util.ClassTypeInformation;
 import org.springframework.util.Assert;
 import org.springframework.util.ClassUtils;
 import org.springframework.util.CollectionUtils;

@@ -81,7 +81,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 			MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
 			Predicate<JsonSchemaPropertyContext> filter, LinkedMultiValueMap<String, Class<?>> mergeProperties) {
 
-		Assert.notNull(converter, "Converter must not be null");
+		Assert.notNull(converter, "Converter must not be null!");
 		this.converter = converter;
 		this.mappingContext = mappingContext;
 		this.filter = filter;

@@ -115,6 +115,10 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 		return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
+	 */
 	@Override
 	public MongoJsonSchema createSchemaFor(Class<?> type) {
 

@@ -267,7 +271,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 	}
 
 	private boolean isSpecificType(MongoPersistentProperty property) {
-		return !TypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
+		return !ClassTypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
 	}
 
 	private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property,

@@ -322,7 +326,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 
 	private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {
 
-		TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? JsonSchemaObject.of(typeObject)
+		TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
 				: JsonSchemaObject.of(Class.class.cast(type));
 
 		if (!CollectionUtils.isEmpty(possibleValues)) {

@@ -331,22 +335,23 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
 		return schemaObject;
 	}
 
-	private String computePropertyFieldName(PersistentProperty<?> property) {
+	private String computePropertyFieldName(PersistentProperty property) {
 
-		return property instanceof MongoPersistentProperty mongoPersistentProperty ?
-				mongoPersistentProperty.getFieldName() : property.getName();
+		return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
+				: property.getName();
 	}
 
-	private boolean isRequiredProperty(PersistentProperty<?> property) {
+	private boolean isRequiredProperty(PersistentProperty property) {
 		return property.getType().isPrimitive();
 	}
 
 	private Class<?> computeTargetType(PersistentProperty<?> property) {
 
-		if (!(property instanceof MongoPersistentProperty mongoProperty)) {
+		if (!(property instanceof MongoPersistentProperty)) {
 			return property.getType();
 		}
 
+		MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
 		if (!mongoProperty.isIdProperty()) {
 			return mongoProperty.getFieldType();
 		}
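A hedged sketch of the schema-creator entry point whose internals change in the hunks above (the Person entity is a placeholder, not part of the diff).

import org.bson.Document;
import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class SchemaCreatorSketch {

	void deriveSchema(MongoTemplate template) {

		// Derive a $jsonSchema document from the mapping metadata of a domain type.
		MongoJsonSchema schema = MongoJsonSchemaCreator.create(template.getConverter()).createSchemaFor(Person.class);

		Document schemaDocument = schema.toDocument();
	}

	// Placeholder document type.
	record Person(String id, String lastname) {}
}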
@@ -57,8 +57,8 @@ public class MongoAction {
 	public MongoAction(@Nullable WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation,
 			String collectionName, @Nullable Class<?> entityType, @Nullable Document document, @Nullable Document query) {
 
-		Assert.hasText(collectionName, "Collection name must not be null or empty");
-		Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null");
+		Assert.hasText(collectionName, "Collection name must not be null or empty!");
+		Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null!");
 
 		this.defaultWriteConcern = defaultWriteConcern;
 		this.mongoActionOperation = mongoActionOperation;
@@ -42,20 +42,29 @@ public class MongoAdmin implements MongoAdminOperations {
 	 */
 	public MongoAdmin(MongoClient client) {
 
-		Assert.notNull(client, "Client must not be null");
+		Assert.notNull(client, "Client must not be null!");
 		this.mongoClient = client;
 	}
 
+	/* (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#dropDatabase(java.lang.String)
+	 */
 	@ManagedOperation
 	public void dropDatabase(String databaseName) {
 		getDB(databaseName).drop();
 	}
 
+	/* (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#createDatabase(java.lang.String)
+	 */
 	@ManagedOperation
 	public void createDatabase(String databaseName) {
 		getDB(databaseName);
 	}
 
+	/* (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#getDatabaseStats(java.lang.String)
+	 */
 	@ManagedOperation
 	public String getDatabaseStats(String databaseName) {
 		return getDB(databaseName).runCommand(new Document("dbStats", 1).append("scale", 1024)).toJson();
@@ -119,15 +119,27 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
 		this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.FactoryBean#getObjectType()
+	 */
 	public Class<? extends MongoClient> getObjectType() {
 		return MongoClient.class;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException)
+	 */
 	@Nullable
 	public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
 		return exceptionTranslator.translateExceptionIfPossible(ex);
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
+	 */
 	@Override
 	protected MongoClient createInstance() throws Exception {
 		return createMongoClient(computeClientSetting());
@@ -146,7 +158,7 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
 	protected MongoClientSettings computeClientSetting() {
 
 		if (connectionString != null && (StringUtils.hasText(host) || port != null)) {
-			throw new IllegalStateException("ConnectionString and host/port configuration exclude one another");
+			throw new IllegalStateException("ConnectionString and host/port configuration exclude one another!");
 		}
 
 		ConnectionString connectionString = this.connectionString != null ? this.connectionString
@@ -324,6 +336,10 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
 		return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object)
+	 */
 	@Override
 	protected void destroyInstance(@Nullable MongoClient instance) throws Exception {
 
@@ -337,11 +353,6 @@ public class MongoClientFactoryBean extends AbstractFactoryBean<MongoClient> imp
 	}
 
 	private String getOrDefault(Object value, String defaultValue) {
-
-		if(value == null) {
-			return defaultValue;
-		}
-		String sValue = value.toString();
-		return StringUtils.hasText(sValue) ? sValue : defaultValue;
+		return !StringUtils.isEmpty(value) ? value.toString() : defaultValue;
 	}
 }
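The computeClientSetting() hunk above keeps the rule that a ConnectionString and explicit host/port settings are mutually exclusive. A hedged configuration sketch (host and port values are placeholders):

import org.springframework.data.mongodb.core.MongoClientFactoryBean;

import com.mongodb.client.MongoClient;

class ClientFactoryBeanSketch {

	static MongoClient createClient() throws Exception {

		MongoClientFactoryBean factory = new MongoClientFactoryBean();

		// configure either host/port ...
		factory.setHost("localhost");
		factory.setPort(27017);
		// ... or a connection string, but not both: combining them triggers the
		// IllegalStateException thrown in computeClientSetting() above.

		factory.afterPropertiesSet();
		return factory.getObject();
	}
}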
@@ -44,8 +44,8 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation
 
 		super(message);
 
-		Assert.notNull(writeResult, "WriteResult must not be null");
-		Assert.notNull(actionOperation, "MongoActionOperation must not be null");
+		Assert.notNull(writeResult, "WriteResult must not be null!");
+		Assert.notNull(actionOperation, "MongoActionOperation must not be null!");
 
 		this.writeResult = writeResult;
 		this.actionOperation = actionOperation;
@@ -64,10 +64,10 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
 	protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
 			PersistenceExceptionTranslator exceptionTranslator) {
 
-		Assert.notNull(mongoClient, "MongoClient must not be null");
-		Assert.hasText(databaseName, "Database name must not be empty");
+		Assert.notNull(mongoClient, "MongoClient must not be null!");
+		Assert.hasText(databaseName, "Database name must not be empty!");
 		Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"),
-				"Database name must not contain slashes, dots, spaces, quotes, or dollar signs");
+				"Database name must not contain slashes, dots, spaces, quotes, or dollar signs!");
 
 		this.mongoClient = mongoClient;
 		this.databaseName = databaseName;
@@ -84,14 +84,22 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
 		this.writeConcern = writeConcern;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
+	 */
 	public MongoDatabase getMongoDatabase() throws DataAccessException {
 		return getMongoDatabase(getDefaultDatabaseName());
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
+	 */
 	@Override
 	public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
 
-		Assert.hasText(dbName, "Database name must not be empty");
+		Assert.hasText(dbName, "Database name must not be empty!");
 
 		MongoDatabase db = doGetMongoDatabase(dbName);
 
@@ -110,16 +118,28 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
 	 */
 	protected abstract MongoDatabase doGetMongoDatabase(String dbName);
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.DisposableBean#destroy()
+	 */
 	public void destroy() throws Exception {
 		if (mongoInstanceCreated) {
 			closeClient();
 		}
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
+	 */
 	public PersistenceExceptionTranslator getExceptionTranslator() {
 		return this.exceptionTranslator;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session)
+	 */
 	public MongoDatabaseFactory withSession(ClientSession session) {
 		return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this);
 	}
@@ -160,31 +180,55 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
 			this.delegate = delegate;
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase()
+		 */
 		@Override
 		public MongoDatabase getMongoDatabase() throws DataAccessException {
 			return proxyMongoDatabase(delegate.getMongoDatabase());
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#getMongoDatabase(java.lang.String)
+		 */
 		@Override
 		public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException {
 			return proxyMongoDatabase(delegate.getMongoDatabase(dbName));
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator()
+		 */
 		@Override
 		public PersistenceExceptionTranslator getExceptionTranslator() {
 			return delegate.getExceptionTranslator();
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions)
+		 */
		@Override
 		public ClientSession getSession(ClientSessionOptions options) {
 			return delegate.getSession(options);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession)
+		 */
 		@Override
 		public MongoDatabaseFactory withSession(ClientSession session) {
 			return delegate.withSession(session);
 		}
 
+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.MongoDbFactory#isTransactionActive()
+		 */
 		@Override
 		public boolean isTransactionActive() {
 			return session != null && session.hasActiveTransaction();
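The ClientSessionBoundMongoDbFactory methods shown above simply delegate while keeping the bound session. A sketch of how such a session-bound factory is typically obtained; the factory itself is assumed to be configured elsewhere (for example as a SimpleMongoClientDatabaseFactory bean):

import com.mongodb.ClientSessionOptions;
import com.mongodb.client.ClientSession;

import org.springframework.data.mongodb.MongoDatabaseFactory;

class SessionBoundFactorySketch {

	static void useSession(MongoDatabaseFactory factory) {

		ClientSession session = factory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());

		// getMongoDatabase() on the returned factory goes through the session-aware proxy above
		MongoDatabaseFactory sessionBound = factory.withSession(session);

		System.out.println(sessionBound.isTransactionActive()); // false until a transaction is started on the session
		session.close();
	}
}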
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2018-2023 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.springframework.data.mongodb.core;
+
+import org.springframework.dao.support.PersistenceExceptionTranslator;
+
+/**
+ * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
+ * database name and exception translator.
+ * <br />
+ * Not intended to be used directly.
+ *
+ * @author Christoph Strobl
+ * @author Mark Paluch
+ * @param <C> Client type.
+ * @since 2.1
+ * @see SimpleMongoClientDatabaseFactory
+ * @deprecated since 3.0, use {@link MongoDatabaseFactorySupport} instead.
+ */
+@Deprecated
+public abstract class MongoDbFactorySupport<C> extends MongoDatabaseFactorySupport<C> {
+
+	/**
+	 * Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName},
+	 * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}.
+	 *
+	 * @param mongoClient must not be {@literal null}.
+	 * @param databaseName must not be {@literal null} or empty.
+	 * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of
+	 *          {@link MongoDbFactorySupport} to close the client on {@link #destroy()}.
+	 * @param exceptionTranslator must not be {@literal null}.
+	 */
+	protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated,
+			PersistenceExceptionTranslator exceptionTranslator) {
+		super(mongoClient, databaseName, mongoInstanceCreated, exceptionTranslator);
+	}
+}
@@ -88,6 +88,10 @@ public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncry
 		this.schemaMap = schemaMap;
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.FactoryBean#getObject()
+	 */
 	@Override
 	public AutoEncryptionSettings getObject() {
 
@@ -105,6 +109,10 @@ public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncry
 		return source != null ? source : Collections.emptyMap();
 	}
 
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.beans.factory.FactoryBean#getObjectType()
+	 */
 	@Override
 	public Class<?> getObjectType() {
 		return AutoEncryptionSettings.class;
@@ -68,8 +68,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 	private static final Set<String> DATA_INTEGRITY_EXCEPTIONS = new HashSet<>(
 			Arrays.asList("WriteConcernException", "MongoWriteException", "MongoBulkWriteException"));
 
-	private static final Set<String> SECURITY_EXCEPTIONS = Set.of("MongoCryptException");
-
+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException)
+	 */
 	@Nullable
 	public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
 
@@ -99,12 +101,12 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 
 		if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) {
 
-			if (ex instanceof MongoServerException mse) {
-				if (mse.getCode() == 11000) {
+			if (ex instanceof MongoServerException) {
+				if (((MongoServerException) ex).getCode() == 11000) {
 					return new DuplicateKeyException(ex.getMessage(), ex);
 				}
-				if (ex instanceof MongoBulkWriteException bulkException) {
-					for (BulkWriteError x : bulkException.getWriteErrors()) {
+				if (ex instanceof MongoBulkWriteException) {
+					for (BulkWriteError x : ((MongoBulkWriteException) ex).getWriteErrors()) {
 						if (x.getCode() == 11000) {
 							return new DuplicateKeyException(ex.getMessage(), ex);
 						}
@@ -116,9 +118,9 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 		}
 
 		// All other MongoExceptions
-		if (ex instanceof MongoException mongoException) {
+		if (ex instanceof MongoException) {
 
-			int code = mongoException.getCode();
+			int code = ((MongoException) ex).getCode();
 
 			if (MongoDbErrorCodes.isDuplicateKeyCode(code)) {
 				return new DuplicateKeyException(ex.getMessage(), ex);
@@ -133,8 +135,6 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
 				return new ClientSessionException(ex.getMessage(), ex);
 			} else if (MongoDbErrorCodes.isTransactionFailureCode(code)) {
 				return new MongoTransactionException(ex.getMessage(), ex);
-			} else if(ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) {
-				return new PermissionDeniedDataAccessException(ex.getMessage(), ex);
 			}
 
 			return new UncategorizedMongoDbException(ex.getMessage(), ex);
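The translation logic above maps duplicate-key errors (code 11000) to Spring's DuplicateKeyException and falls back to UncategorizedMongoDbException for unknown MongoExceptions. A minimal sketch of calling the translator directly; the incoming exception is assumed to originate from a MongoDB driver call:

import org.springframework.dao.DataAccessException;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.data.mongodb.core.MongoExceptionTranslator;

class ExceptionTranslationSketch {

	static DataAccessException translate(RuntimeException driverException) {

		DataAccessException translated = new MongoExceptionTranslator().translateExceptionIfPossible(driverException);

		if (translated instanceof DuplicateKeyException) {
			// a unique index was violated; handle the duplicate insert here
		}
		return translated;
	}
}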
@@ -192,7 +192,7 @@ public interface MongoJsonSchemaCreator {
 	 */
 	static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {
 
-		Assert.notNull(mongoConverter, "MongoConverter must not be null");
+		Assert.notNull(mongoConverter, "MongoConverter must not be null!");
 		return new MappingMongoJsonSchemaCreator(mongoConverter);
 	}
 
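MongoJsonSchemaCreator.create(MongoConverter) shown above is the entry point for deriving a $jsonSchema from a mapped type. A short sketch (the domain type stands in for any mapped entity class):

import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class SchemaCreatorSketch {

	static MongoJsonSchema schemaFor(MongoConverter converter, Class<?> domainType) {
		// derives the schema from the mapping metadata known to the given converter
		return MongoJsonSchemaCreator.create(converter).createSchemaFor(domainType);
	}
}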
@@ -20,23 +20,20 @@ import java.util.List;
 import java.util.Set;
 import java.util.function.Consumer;
 import java.util.function.Supplier;
-import java.util.stream.Stream;
 
 import org.bson.Document;
-import org.springframework.data.domain.KeysetScrollPosition;
-import org.springframework.data.domain.Window;
 import org.springframework.data.geo.GeoResults;
 import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
 import org.springframework.data.mongodb.core.aggregation.Aggregation;
-import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
 import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
-import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
 import org.springframework.data.mongodb.core.aggregation.AggregationResults;
 import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
 import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
 import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
 import org.springframework.data.mongodb.core.convert.MongoConverter;
 import org.springframework.data.mongodb.core.index.IndexOperations;
+import org.springframework.data.mongodb.core.mapreduce.GroupBy;
+import org.springframework.data.mongodb.core.mapreduce.GroupByResults;
 import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
 import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
 import org.springframework.data.mongodb.core.query.BasicQuery;
@@ -45,6 +42,7 @@ import org.springframework.data.mongodb.core.query.NearQuery;
 import org.springframework.data.mongodb.core.query.Query;
 import org.springframework.data.mongodb.core.query.Update;
 import org.springframework.data.mongodb.core.query.UpdateDefinition;
+import org.springframework.data.util.CloseableIterator;
 import org.springframework.lang.Nullable;
 import org.springframework.util.Assert;
 import org.springframework.util.ClassUtils;
@@ -183,7 +181,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	 */
 	default SessionScoped withSession(Supplier<ClientSession> sessionProvider) {
 
-		Assert.notNull(sessionProvider, "SessionProvider must not be null");
+		Assert.notNull(sessionProvider, "SessionProvider must not be null!");
 
 		return new SessionScoped() {
 
@@ -222,34 +220,34 @@ public interface MongoOperations extends FluentMongoOperations {
 	 * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB
 	 * {@link com.mongodb.client.FindIterable}.
 	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed.
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
+	 * be closed.
 	 *
 	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
 	 *          specification. Must not be {@literal null}.
 	 * @param entityType must not be {@literal null}.
 	 * @param <T> element return type
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return will never be {@literal null}.
 	 * @since 1.7
 	 */
-	<T> Stream<T> stream(Query query, Class<T> entityType);
+	<T> CloseableIterator<T> stream(Query query, Class<T> entityType);
 
 	/**
 	 * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed
 	 * by a Mongo DB {@link com.mongodb.client.FindIterable}.
 	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed.
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.FindIterable} that needs to
+	 * be closed.
 	 *
 	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
 	 *          specification. Must not be {@literal null}.
 	 * @param entityType must not be {@literal null}.
 	 * @param collectionName must not be {@literal null} or empty.
 	 * @param <T> element return type
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return will never be {@literal null}.
 	 * @since 1.10
 	 */
-	<T> Stream<T> stream(Query query, Class<T> entityType, String collectionName);
+	<T> CloseableIterator<T> stream(Query query, Class<T> entityType, String collectionName);
 
 	/**
 	 * Create an uncapped collection with a name based on the provided entity class.
@@ -285,58 +283,6 @@ public interface MongoOperations extends FluentMongoOperations {
 	 */
 	MongoCollection<Document> createCollection(String collectionName, @Nullable CollectionOptions collectionOptions);
 
-	/**
-	 * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline
-	 * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}.
-	 *
-	 * @param name the name of the view to create.
-	 * @param source the type defining the views source collection.
-	 * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content.
-	 * @since 4.0
-	 */
-	default MongoCollection<Document> createView(String name, Class<?> source, AggregationOperation... stages) {
-		return createView(name, source, AggregationPipeline.of(stages));
-	}
-
-	/**
-	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
-	 * another collection or view identified by the given {@link #getCollectionName(Class) source type}.
-	 *
-	 * @param name the name of the view to create.
-	 * @param source the type defining the views source collection.
-	 * @param pipeline the {@link AggregationPipeline} defining the view content.
-	 * @since 4.0
-	 */
-	default MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline) {
-		return createView(name, source, pipeline, null);
-	}
-
-	/**
-	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
-	 * another collection or view identified by the given {@link #getCollectionName(Class) source type}.
-	 *
-	 * @param name the name of the view to create.
-	 * @param source the type defining the views source collection.
-	 * @param pipeline the {@link AggregationPipeline} defining the view content.
-	 * @param options additional settings to apply when creating the view. Can be {@literal null}.
-	 * @since 4.0
-	 */
-	MongoCollection<Document> createView(String name, Class<?> source, AggregationPipeline pipeline,
-			@Nullable ViewOptions options);
-
-	/**
-	 * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on
-	 * another collection or view identified by the given source.
-	 *
-	 * @param name the name of the view to create.
-	 * @param source the name of the collection or view defining the to be created views source.
-	 * @param pipeline the {@link AggregationPipeline} defining the view content.
-	 * @param options additional settings to apply when creating the view. Can be {@literal null}.
-	 * @since 4.0
-	 */
-	MongoCollection<Document> createView(String name, String source, AggregationPipeline pipeline,
-			@Nullable ViewOptions options);
-
 	/**
 	 * A set of collection names.
 	 *
@@ -470,6 +416,43 @@ public interface MongoOperations extends FluentMongoOperations {
 	 */
 	<T> List<T> findAll(Class<T> entityClass, String collectionName);
 
+	/**
+	 * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of
+	 * the returned object that takes int account the initial document structure as well as any finalize functions.
+	 *
+	 * @param inputCollectionName the collection where the group operation will read from
+	 * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document,
+	 *          reduce function.
+	 * @param entityClass The parametrized type of the returned list
+	 * @return The results of the group operation
+	 * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
+	 *             Please use {@link #aggregate(TypedAggregation, String, Class) } with a
+	 *             {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
+	 */
+	@Deprecated
+	<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);
+
+	/**
+	 * Execute a group operation restricting the rows to those which match the provided Criteria. The group operation
+	 * entity class should match the 'shape' of the returned object that takes int account the initial document structure
+	 * as well as any finalize functions.
+	 *
+	 * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are
+	 *          considered.
+	 * @param inputCollectionName the collection where the group operation will read from
+	 * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document,
+	 *          reduce function.
+	 * @param entityClass The parametrized type of the returned list
+	 * @return The results of the group operation
+	 * @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
+	 *             Please use {@link #aggregate(TypedAggregation, String, Class) } with a
+	 *             {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
+	 *             {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
+	 */
+	@Deprecated
+	<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
+			Class<T> entityClass);
+
 	/**
 	 * Execute an aggregation operation. The raw results will be mapped to the given entity class. The name of the
 	 * inputCollection is derived from the inputType of the aggregation.
@@ -524,9 +507,9 @@ public interface MongoOperations extends FluentMongoOperations {
 	/**
 	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
 	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be
-	 * closed. The raw results will be mapped to the given entity class. The name of the inputCollection is derived from
-	 * the inputType of the aggregation.
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
+	 * needs to be closed. The raw results will be mapped to the given entity class. The name of the inputCollection is
+	 * derived from the inputType of the aggregation.
 	 * <p>
 	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
 	 * explanation mode will throw an {@link IllegalArgumentException}.
@@ -535,37 +518,31 @@ public interface MongoOperations extends FluentMongoOperations {
 	 *          {@literal null}.
 	 * @param collectionName The name of the input collection to use for the aggreation.
 	 * @param outputType The parametrized type of the returned list, must not be {@literal null}.
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return The results of the aggregation operation.
 	 * @since 2.0
 	 */
-	<O> Stream<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
+	<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
 
 	/**
-	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
-	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be
-	 * closed. The raw results will be mapped to the given entity class and are returned as stream. The name of the
-	 * inputCollection is derived from the inputType of the aggregation.
-	 * <p>
+	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
+	 * needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
+	 * of the inputCollection is derived from the inputType of the aggregation. <br />
 	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
 	 * explanation mode will throw an {@link IllegalArgumentException}.
 	 *
 	 * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be
 	 *          {@literal null}.
 	 * @param outputType The parametrized type of the returned list, must not be {@literal null}.
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return The results of the aggregation operation.
 	 * @since 2.0
 	 */
-	<O> Stream<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
+	<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
 
 	/**
-	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
-	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be
-	 * closed. The raw results will be mapped to the given entity class.
-	 * <p>
+	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
+	 * needs to be closed. The raw results will be mapped to the given entity class. <br />
 	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
 	 * explanation mode will throw an {@link IllegalArgumentException}.
 	 *
@@ -574,18 +551,15 @@ public interface MongoOperations extends FluentMongoOperations {
 	 * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or
 	 *          empty.
 	 * @param outputType The parametrized type of the returned list, must not be {@literal null}.
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return The results of the aggregation operation.
 	 * @since 2.0
 	 */
-	<O> Stream<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
+	<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
 
 	/**
-	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
-	 * <p>
-	 * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be
-	 * closed. The raw results will be mapped to the given entity class.
-	 * <p>
+	 * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
+	 * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
+	 * needs to be closed. The raw results will be mapped to the given entity class. <br />
 	 * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
 	 * explanation mode will throw an {@link IllegalArgumentException}.
 	 *
@@ -594,11 +568,10 @@ public interface MongoOperations extends FluentMongoOperations {
 	 * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or
 	 *          empty.
 	 * @param outputType The parametrized type of the returned list, must not be {@literal null}.
-	 * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g.
-	 *         through a try-with-resources clause).
+	 * @return The results of the aggregation operation.
 	 * @since 2.0
 	 */
-	<O> Stream<O> aggregateStream(Aggregation aggregation, String collectionName, Class<O> outputType);
+	<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, String collectionName, Class<O> outputType);
 
 	/**
 	 * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE
@@ -806,57 +779,6 @@ public interface MongoOperations extends FluentMongoOperations {
 	 */
 	<T> List<T> find(Query query, Class<T> entityClass, String collectionName);
 
-	/**
-	 * Query for a window of objects of type T from the specified collection. <br />
-	 * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
-	 * {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
-	 * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}.
-	 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
-	 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
-	 * to map objects since the test for class type is done in the client and not on the server.
-	 * <p>
-	 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
-	 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
-	 * {@code null} values through {@code $gt/$lt} operators.
-	 *
-	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
-	 *          specification. Must not be {@literal null}.
-	 * @param entityType the parametrized type of the returned window.
-	 * @return the converted window.
-	 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
-	 *           position.
-	 * @since 4.1
-	 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
-	 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
-	 */
-	<T> Window<T> scroll(Query query, Class<T> entityType);
-
-	/**
-	 * Query for a window of objects of type T from the specified collection. <br />
-	 * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with
-	 * {@link Query#limit(int)} to limit large query results for efficient scrolling. <br />
-	 * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}.
-	 * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
-	 * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
-	 * to map objects since the test for class type is done in the client and not on the server.
-	 * <p>
-	 * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort
-	 * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or
-	 * {@code null} values through {@code $gt/$lt} operators.
-	 *
-	 * @param query the query class that specifies the criteria used to find a record and also an optional fields
-	 *          specification. Must not be {@literal null}.
-	 * @param entityType the parametrized type of the returned window.
-	 * @param collectionName name of the collection to retrieve the objects from.
-	 * @return the converted window.
-	 * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid
-	 *           position.
-	 * @since 4.1
-	 * @see Query#with(org.springframework.data.domain.OffsetScrollPosition)
-	 * @see Query#with(org.springframework.data.domain.KeysetScrollPosition)
-	 */
-	<T> Window<T> scroll(Query query, Class<T> entityType, String collectionName);
-
 	/**
 	 * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be
 	 * derived from the given target class as well.
@@ -1097,7 +1019,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	@Nullable
 	default <T> T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) {
 
-		Assert.notNull(replacement, "Replacement must not be null");
+		Assert.notNull(replacement, "Replacement must not be null!");
 		return findAndReplace(query, replacement, options, (Class<T>) ClassUtils.getUserClass(replacement), collectionName);
 	}
 
@@ -1230,7 +1152,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	 * @param entityClass class that determines the collection to use. Must not be {@literal null}.
 	 * @return the count of matching documents.
 	 * @throws org.springframework.data.mapping.MappingException if the collection name cannot be
 	 *           {@link #getCollectionName(Class) derived} from the given type.
 	 * @see #exactCount(Query, Class)
 	 * @see #estimatedCount(Class)
 	 */
@@ -1292,7 +1214,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	 */
 	default long estimatedCount(Class<?> entityClass) {
 
-		Assert.notNull(entityClass, "Entity class must not be null");
+		Assert.notNull(entityClass, "Entity class must not be null!");
 		return estimatedCount(getCollectionName(entityClass));
 	}
 
@@ -1678,7 +1600,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	DeleteResult remove(Object object, String collectionName);
 
 	/**
-	 * Remove all documents that match the provided query document criteria from the collection used to store the
+	 * Remove all documents that match the provided query document criteria from the the collection used to store the
 	 * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
 	 *
	 * @param query the query document that specifies the criteria used to remove a record.
@@ -1691,7 +1613,7 @@ public interface MongoOperations extends FluentMongoOperations {
 	DeleteResult remove(Query query, Class<?> entityClass);
 
 	/**
-	 * Remove all documents that match the provided query document criteria from the collection used to store the
+	 * Remove all documents that match the provided query document criteria from the the collection used to store the
 	 * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query.
 	 *
 	 * @param query the query document that specifies the criteria used to remove a record.
@@ -1741,9 +1663,9 @@ public interface MongoOperations extends FluentMongoOperations {
 	<T> List<T> findAllAndRemove(Query query, Class<T> entityClass);
 
 	/**
-	 * Returns and removes all documents that match the provided query document criteria from the collection used to store
-	 * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the
-	 * query.
+	 * Returns and removes all documents that match the provided query document criteria from the the collection used to
+	 * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in
+	 * the query.
 	 *
 	 * @param query the query document that specifies the criteria used to find and remove documents.
 	 * @param entityClass class of the pojo to be operated on.
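The recurring change in the MongoOperations hunks above swaps Stream-returning stream()/aggregateStream() signatures against CloseableIterator-returning ones. Either way the underlying cursor must be closed once fully processed; a sketch against the Stream-returning variant on the left-hand side of the diff (Person is a hypothetical mapped entity):

import java.util.stream.Stream;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class StreamQuerySketch {

	static void streamPeople(MongoOperations operations) {

		Query query = new Query(Criteria.where("lastname").is("Matthews"));

		// try-with-resources closes the wrapped FindIterable cursor
		try (Stream<Person> people = operations.stream(query, Person.class)) {
			people.forEach(System.out::println);
		}
	}

	record Person(String id, String lastname) {}
}

The deprecated group(...) methods re-introduced on the right-hand side are, per their own Javadoc, replaced by an aggregation with a GroupOperation, for example Aggregation.newAggregation(Person.class, Aggregation.group("lastname").count().as("count")) passed to aggregate(...).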
File diff suppressed because it is too large
@@ -21,7 +21,6 @@ import java.util.Map.Entry;
|
|||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
import java.util.concurrent.ConcurrentHashMap;
|
||||||
import java.util.concurrent.TimeUnit;
|
|
||||||
import java.util.function.Consumer;
|
import java.util.function.Consumer;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
@@ -29,7 +28,6 @@ import java.util.stream.Collectors;
|
|||||||
import org.bson.BsonValue;
|
import org.bson.BsonValue;
|
||||||
import org.bson.Document;
|
import org.bson.Document;
|
||||||
import org.bson.codecs.Codec;
|
import org.bson.codecs.Codec;
|
||||||
import org.bson.conversions.Bson;
|
|
||||||
import org.bson.types.ObjectId;
|
import org.bson.types.ObjectId;
|
||||||
import org.springframework.data.mapping.PropertyPath;
|
import org.springframework.data.mapping.PropertyPath;
|
||||||
import org.springframework.data.mapping.PropertyReferenceException;
|
import org.springframework.data.mapping.PropertyReferenceException;
|
||||||
@@ -54,7 +52,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
|||||||
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
import org.springframework.data.mongodb.core.mapping.ShardKey;
|
||||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||||
import org.springframework.data.mongodb.core.query.Collation;
|
import org.springframework.data.mongodb.core.query.Collation;
|
||||||
import org.springframework.data.mongodb.core.query.Meta;
|
|
||||||
import org.springframework.data.mongodb.core.query.Query;
|
import org.springframework.data.mongodb.core.query.Query;
|
||||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||||
@@ -63,6 +60,7 @@ import org.springframework.data.projection.EntityProjection;
|
|||||||
import org.springframework.data.util.Lazy;
|
import org.springframework.data.util.Lazy;
|
||||||
import org.springframework.lang.Nullable;
|
import org.springframework.lang.Nullable;
|
||||||
import org.springframework.util.ClassUtils;
|
import org.springframework.util.ClassUtils;
|
||||||
|
import org.springframework.util.StringUtils;
|
||||||
|
|
||||||
import com.mongodb.client.model.CountOptions;
|
import com.mongodb.client.model.CountOptions;
|
||||||
import com.mongodb.client.model.DeleteOptions;
|
import com.mongodb.client.model.DeleteOptions;
|
||||||
@@ -390,12 +388,12 @@ class QueryOperations {
|
|||||||
|
|
||||||
for (Entry<String, Object> entry : fields.entrySet()) {
|
for (Entry<String, Object> entry : fields.entrySet()) {
|
||||||
|
|
||||||
if (entry.getValue()instanceof MongoExpression mongoExpression) {
|
if (entry.getValue() instanceof MongoExpression) {
|
||||||
|
|
||||||
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
||||||
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
||||||
|
|
||||||
evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx));
|
evaluated.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||||
} else {
|
} else {
|
||||||
evaluated.put(entry.getKey(), entry.getValue());
|
evaluated.put(entry.getKey(), entry.getValue());
|
||||||
}
|
}
|
||||||
@@ -458,7 +456,7 @@ class QueryOperations {
|
|||||||
*/
|
*/
|
||||||
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
private DistinctQueryContext(@Nullable Object query, String fieldName) {
|
||||||
|
|
||||||
super(query instanceof Document document ? new BasicQuery(document) : (Query) query);
|
super(query instanceof Document ? new BasicQuery((Document) query) : (Query) query);
|
||||||
this.fieldName = fieldName;
|
this.fieldName = fieldName;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -566,27 +564,17 @@ class QueryOperations {
 			if (query.getLimit() > 0) {
 				options.limit(query.getLimit());
 			}

 			if (query.getSkip() > 0) {
 				options.skip((int) query.getSkip());
 			}

-			Meta meta = query.getMeta();
-			if (meta.hasValues()) {
-
-				if (meta.hasMaxTime()) {
-					options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS);
-				}
-
-				if (meta.hasComment()) {
-					options.comment(meta.getComment());
-				}
-			}
-
-			HintFunction hintFunction = HintFunction.from(query.getHint());
-
-			if (hintFunction.isPresent()) {
-				options = hintFunction.apply(codecRegistryProvider, options::hintString, options::hint);
-			}
+			if (StringUtils.hasText(query.getHint())) {
+
+				String hint = query.getHint();
+				if (BsonUtils.isJsonDocument(hint)) {
+					options.hint(BsonUtils.parse(hint, codecRegistryProvider));
+				} else {
+					options.hintString(hint);
+				}
+			}

 			if (callback != null) {
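Both sides of this hunk end up calling the same two driver methods: CountOptions.hintString(String) when the hint is an index name and CountOptions.hint(Bson) when it is an index specification document; they differ only in where that decision lives (the HintFunction helper versus an inline BsonUtils.isJsonDocument check). A standalone sketch of the decision, assuming only MongoDB driver types; the looksLikeJson check is a simplified stand-in for the codec-aware handling in BsonUtils:

import org.bson.Document;
import org.bson.conversions.Bson;

import com.mongodb.client.model.CountOptions;

class HintSketch {

	// Apply a user-supplied hint that may be either an index name ("firstname_1")
	// or an index specification document ("{ firstname: 1 }").
	static CountOptions applyHint(CountOptions options, String hint) {

		if (looksLikeJson(hint)) {
			Bson indexSpec = Document.parse(hint); // simplified: ignores any custom codec registry
			return options.hint(indexSpec);
		}
		return options.hintString(hint);
	}

	// Crude stand-in for BsonUtils.isJsonDocument; the real check is more careful.
	private static boolean looksLikeJson(String value) {
		String trimmed = value.trim();
		return trimmed.startsWith("{") && trimmed.endsWith("}");
	}
}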
@@ -731,7 +719,6 @@ class QueryOperations {
 					.arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()));
 			}

-			HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint);
 			applyCollation(domainType, options::collation);

 			if (callback != null) {
@@ -792,7 +779,7 @@ class QueryOperations {

 			Document filterWithShardKey = new Document(filter);
 			getMappedShardKeyFields(domainType)
-					.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key)));
+					.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));

 			return filterWithShardKey;
 		}
@@ -922,10 +909,10 @@ class QueryOperations {

 			this.aggregation = aggregation;

-			if (aggregation instanceof TypedAggregation typedAggregation) {
-				this.inputType = typedAggregation.getInputType();
-			} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) {
-				this.inputType = typeBasedAggregationOperationContext.getType();
+			if (aggregation instanceof TypedAggregation) {
+				this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
+			} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
+				this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
 			} else {
 				this.inputType = null;
 			}
@@ -950,8 +937,8 @@ class QueryOperations {

 			this.aggregation = aggregation;

-			if (aggregation instanceof TypedAggregation typedAggregation) {
-				this.inputType = typedAggregation.getInputType();
+			if (aggregation instanceof TypedAggregation) {
+				this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
 			} else {
 				this.inputType = inputType;
 			}
@@ -41,15 +41,19 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
 	 */
 	ReactiveAggregationOperationSupport(ReactiveMongoTemplate template) {

-		Assert.notNull(template, "Template must not be null");
+		Assert.notNull(template, "Template must not be null!");

 		this.template = template;
 	}

+	/*
+	 * (non-Javadoc)
+	 * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class)
+	 */
 	@Override
 	public <T> ReactiveAggregation<T> aggregateAndReturn(Class<T> domainType) {

-		Assert.notNull(domainType, "DomainType must not be null");
+		Assert.notNull(domainType, "DomainType must not be null!");

 		return new ReactiveAggregationSupport<>(template, domainType, null, null);
 	}
@@ -71,22 +75,34 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
 			this.collection = collection;
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithCollection#inCollection(java.lang.String)
+		 */
 		@Override
 		public AggregationOperationWithAggregation<T> inCollection(String collection) {

-			Assert.hasText(collection, "Collection must not be null nor empty");
+			Assert.hasText(collection, "Collection must not be null nor empty!");

 			return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithAggregation#by(org.springframework.data.mongodb.core.Aggregation)
+		 */
 		@Override
 		public TerminatingAggregationOperation<T> by(Aggregation aggregation) {

-			Assert.notNull(aggregation, "Aggregation must not be null");
+			Assert.notNull(aggregation, "Aggregation must not be null!");

 			return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection);
 		}

+		/*
+		 * (non-Javadoc)
+		 * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.TerminatingAggregationOperation#all()
+		 */
 		@Override
 		public Flux<T> all() {
 			return template.aggregate(aggregation, getCollectionName(aggregation), domainType);
@@ -98,7 +114,9 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
 			return collection;
 		}

-		if (aggregation instanceof TypedAggregation typedAggregation) {
+		if (aggregation instanceof TypedAggregation) {

+			TypedAggregation<?> typedAggregation = (TypedAggregation<?>) aggregation;
+
 			if (typedAggregation.getInputType() != null) {
 				return template.getCollectionName(typedAggregation.getInputType());
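The fallback above works because a TypedAggregation remembers the domain type it was created for, which the template can then map to a collection name when no explicit collection was given. A small sketch of how such an aggregation is typically built; the Person class and its fields are placeholders, not part of this diff:

import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.query.Criteria;

class TypedAggregationSketch {

	static class Person {
		String lastname;
		int age;
	}

	// newAggregation(Person.class, ...) retains Person as the aggregation's input type,
	// which is what getCollectionName(aggregation) falls back to in the code above.
	static TypedAggregation<Person> adultsByLastname() {
		return newAggregation(Person.class,
				match(Criteria.where("age").gte(18)),
				group("lastname").count().as("total"));
	}
}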
@@ -1,138 +0,0 @@
-/*
- * Copyright 2023 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.springframework.data.mongodb.core;
-
-import reactor.core.publisher.Mono;
-
-import java.util.List;
-
-import org.springframework.data.mongodb.core.query.Query;
-import org.springframework.data.mongodb.core.query.UpdateDefinition;
-
-import com.mongodb.bulk.BulkWriteResult;
-
-/**
- * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and
- * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single
- * operations or list of similar operations in sequence which can then eventually be executed by calling
- * {@link #execute()}.
- *
- * <pre class="code">
- * ReactiveMongoOperations ops = …;
- *
- * ops.bulkOps(BulkMode.UNORDERED, Person.class)
- *     .insert(newPerson)
- *     .updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
- *     .execute();
- * </pre>
- * <p>
- * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations
- * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and
- * the version field remains not populated.
- *
- * @author Christoph Strobl
- * @since 4.1
- */
-public interface ReactiveBulkOperations {
-
-	/**
-	 * Add a single insert to the bulk operation.
-	 *
-	 * @param documents the document to insert, must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations insert(Object documents);
-
-	/**
-	 * Add a list of inserts to the bulk operation.
-	 *
-	 * @param documents List of documents to insert, must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations insert(List<? extends Object> documents);
-
-	/**
-	 * Add a single update to the bulk operation. For the update request, only the first matching document is updated.
-	 *
-	 * @param query update criteria, must not be {@literal null}.
-	 * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations updateOne(Query query, UpdateDefinition update);
-
-	/**
-	 * Add a single update to the bulk operation. For the update request, all matching documents are updated.
-	 *
-	 * @param query Update criteria.
-	 * @param update Update operation to perform.
-	 * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update);
-
-	/**
-	 * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty,
-	 * else an insert.
-	 *
-	 * @param query Update criteria.
-	 * @param update Update operation to perform.
-	 * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations upsert(Query query, UpdateDefinition update);
-
-	/**
-	 * Add a single remove operation to the bulk operation.
-	 *
-	 * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations remove(Query remove);
-
-	/**
-	 * Add a list of remove operations to the bulk operation.
-	 *
-	 * @param removes the remove operations to perform, must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations remove(List<Query> removes);
-
-	/**
-	 * Add a single replace operation to the bulk operation.
-	 *
-	 * @param query Update criteria.
-	 * @param replacement the replacement document. Must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
-	 */
-	default ReactiveBulkOperations replaceOne(Query query, Object replacement) {
-		return replaceOne(query, replacement, FindAndReplaceOptions.empty());
-	}
-
-	/**
-	 * Add a single replace operation to the bulk operation.
-	 *
-	 * @param query Update criteria.
-	 * @param replacement the replacement document. Must not be {@literal null}.
-	 * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
-	 * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}.
-	 */
-	ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
-
-	/**
-	 * Execute all bulk operations using the default write concern.
-	 *
-	 * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc.
-	 */
-	Mono<BulkWriteResult> execute();
-}
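For reference, the Javadoc example of the removed interface translates into roughly the following caller code on the side of the diff that still contains it. The Person type and the injected ReactiveMongoOperations instance are assumptions of this sketch, and nothing is sent to the server until the returned Mono is subscribed to:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Update;

import com.mongodb.bulk.BulkWriteResult;

import reactor.core.publisher.Mono;

class ReactiveBulkSketch {

	// One unordered batch combining an insert and an updateOne; executed lazily on subscription.
	static Mono<BulkWriteResult> insertAndRename(ReactiveMongoOperations ops, Person newPerson) {
		return ops.bulkOps(BulkMode.UNORDERED, Person.class)
				.insert(newPerson)
				.updateOne(query(where("firstname").is("Joe")), Update.update("lastname", "Doe"))
				.execute();
	}

	// Placeholder domain type, not part of the diff.
	static class Person {
		String firstname;
		String lastname;
	}
}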
Some files were not shown because too many files have changed in this diff.