Compare commits
214 Commits
2.2.0.RC2
...
2.1.14.REL
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8e51d005d5 | ||
|
|
1940a5c2c2 | ||
|
|
5a274029d7 | ||
|
|
c342bf266e | ||
|
|
11baf455d2 | ||
|
|
05882813ac | ||
|
|
bd3f26c928 | ||
|
|
5555aa970b | ||
|
|
e74fe05abd | ||
|
|
d579254fbc | ||
|
|
71c8e4cc02 | ||
|
|
a087c7d17c | ||
|
|
90cec275a6 | ||
|
|
e4eefe577d | ||
|
|
b57a6612f6 | ||
|
|
62b2d54e0d | ||
|
|
aff823da57 | ||
|
|
d45b630724 | ||
|
|
fc8c97aeb0 | ||
|
|
004e7f01b2 | ||
|
|
5c80ee0087 | ||
|
|
adb9dc29a2 | ||
|
|
6eb6feadbb | ||
|
|
166aab39c4 | ||
|
|
fa94c22c2a | ||
|
|
e0f88a8b84 | ||
|
|
59aa8051d3 | ||
|
|
205a06e79a | ||
|
|
899b43a29b | ||
|
|
0f0a4ed31b | ||
|
|
9acc8d5268 | ||
|
|
313ffb5426 | ||
|
|
dc859953f4 | ||
|
|
bc29f2b24e | ||
|
|
686cdac73f | ||
|
|
b7b339577b | ||
|
|
2166a6e953 | ||
|
|
3c601a699a | ||
|
|
37211fc6d7 | ||
|
|
a45c9040c4 | ||
|
|
23c0a07b93 | ||
|
|
f3a7d6a20e | ||
|
|
0d22d831f8 | ||
|
|
6b0e2ab5de | ||
|
|
5d02b84856 | ||
|
|
93e911985e | ||
|
|
e7faa1a1ec | ||
|
|
631714941a | ||
|
|
db9428cebe | ||
|
|
4be53ac952 | ||
|
|
564acd75d5 | ||
|
|
95ccdf4c20 | ||
|
|
291ef4bb75 | ||
|
|
c7461928f4 | ||
|
|
f5a5d3e96b | ||
|
|
b213aada80 | ||
|
|
403e5043cb | ||
|
|
bdbda459c0 | ||
|
|
0bf6d5f7fa | ||
|
|
f2ae14206a | ||
|
|
049159374d | ||
|
|
79f8e06fc1 | ||
|
|
370db2dce5 | ||
|
|
74325d5193 | ||
|
|
e6ea2e1379 | ||
|
|
cb85f3cfa6 | ||
|
|
aff8b89006 | ||
|
|
0ad8857368 | ||
|
|
46de82fe0b | ||
|
|
387348b615 | ||
|
|
8fd41faac6 | ||
|
|
8a15e1086b | ||
|
|
8502786648 | ||
|
|
d7107d49bf | ||
|
|
f42cb1e2f0 | ||
|
|
a9403b526f | ||
|
|
5f6291ed32 | ||
|
|
676ee80434 | ||
|
|
b54641ff86 | ||
|
|
6930c720ca | ||
|
|
611cfe9c11 | ||
|
|
507a1fbf34 | ||
|
|
087649de35 | ||
|
|
1f01f34377 | ||
|
|
295c43c6ff | ||
|
|
5a62d449bf | ||
|
|
1cbbe692b5 | ||
|
|
5bfe125160 | ||
|
|
1b6722324e | ||
|
|
a212f5f79d | ||
|
|
2879348d4b | ||
|
|
10097311c7 | ||
|
|
b8303a56b6 | ||
|
|
f9e468aebb | ||
|
|
b900dc6c09 | ||
|
|
bede55714c | ||
|
|
3ec426352f | ||
|
|
c6293e0ebd | ||
|
|
74e49a2326 | ||
|
|
69c451f69f | ||
|
|
9af8160e05 | ||
|
|
fdf4ea1e60 | ||
|
|
8c7afe012f | ||
|
|
6ba258a1f3 | ||
|
|
059c8cf1dd | ||
|
|
2b8955f583 | ||
|
|
23fde167f6 | ||
|
|
9470f82e9b | ||
|
|
1e88e241d4 | ||
|
|
0b8396c43c | ||
|
|
b602e4cb26 | ||
|
|
500393e596 | ||
|
|
7e4cbdb8b0 | ||
|
|
1d6d8ff8e6 | ||
|
|
8ea4cbe9ea | ||
|
|
45a0c36184 | ||
|
|
599c79bce2 | ||
|
|
eda6d40aa7 | ||
|
|
22b844c87f | ||
|
|
bdf7ec7c9b | ||
|
|
13db06d345 | ||
|
|
365ecd53c4 | ||
|
|
dc40c42815 | ||
|
|
49415efb8c | ||
|
|
dc234906f4 | ||
|
|
a7f51a7c85 | ||
|
|
9b0bd11d09 | ||
|
|
d7ad883f69 | ||
|
|
44308bfbe1 | ||
|
|
9b673d342f | ||
|
|
5517198310 | ||
|
|
819a04f3db | ||
|
|
f7202067a5 | ||
|
|
f20a0f20c9 | ||
|
|
02216d5941 | ||
|
|
79f2094322 | ||
|
|
afbc5cfa25 | ||
|
|
a3882a5e5c | ||
|
|
8194772388 | ||
|
|
12f18850dc | ||
|
|
816c1da248 | ||
|
|
5a78f19781 | ||
|
|
698837921b | ||
|
|
0f7fc7880b | ||
|
|
6e42f49b08 | ||
|
|
bdfe4e99ed | ||
|
|
85aa3927a6 | ||
|
|
33c4e4294f | ||
|
|
a89ab387cc | ||
|
|
e52b8c9d38 | ||
|
|
4dbf4795db | ||
|
|
8e4c6f68ae | ||
|
|
fddbd126ea | ||
|
|
ee5b26ab1c | ||
|
|
01e9a2ed67 | ||
|
|
10107c7b81 | ||
|
|
abe7876086 | ||
|
|
a759dff5fd | ||
|
|
9f8d081ef3 | ||
|
|
b8f6030441 | ||
|
|
267decf189 | ||
|
|
3a7492c68d | ||
|
|
273088b6a8 | ||
|
|
723b481f82 | ||
|
|
8a34bc46a2 | ||
|
|
bb4c16f4cd | ||
|
|
cf5b7c9763 | ||
|
|
f4414e98a2 | ||
|
|
a97bfd2a37 | ||
|
|
9fe0f5c984 | ||
|
|
718a7ffe8c | ||
|
|
f7106dc425 | ||
|
|
0698f8bcb8 | ||
|
|
3effd9ae6f | ||
|
|
7002cd1456 | ||
|
|
a15d488657 | ||
|
|
44651581b1 | ||
|
|
6d64f5b2b2 | ||
|
|
0c52a29ba8 | ||
|
|
bd8bd4f568 | ||
|
|
c75f29dc42 | ||
|
|
e493af7266 | ||
|
|
8d892e5924 | ||
|
|
053299f243 | ||
|
|
872659cc00 | ||
|
|
96978a6194 | ||
|
|
2253d3e301 | ||
|
|
5982ee84f7 | ||
|
|
dd2af6462d | ||
|
|
622643bf24 | ||
|
|
51cc55baac | ||
|
|
0b106e5649 | ||
|
|
8975d93ab3 | ||
|
|
e25b6c49f5 | ||
|
|
7a70c205de | ||
|
|
6045efa450 | ||
|
|
7b0816b3ee | ||
|
|
14e4ea736d | ||
|
|
32e7d9ab7f | ||
|
|
7f35ad9e45 | ||
|
|
60228f6e5a | ||
|
|
7604492b7f | ||
|
|
4680fe0e77 | ||
|
|
b4228c88d3 | ||
|
|
f6ef8c94c8 | ||
|
|
0d0dafa85e | ||
|
|
29aa34619f | ||
|
|
7f19f769c4 | ||
|
|
a40e89d90a | ||
|
|
6b2350200a | ||
|
|
fb50b0f6e7 | ||
|
|
ab568229b5 | ||
|
|
7f9c1bd774 | ||
|
|
670a0978da |
@@ -16,12 +16,9 @@ before_install:
|
||||
|
||||
env:
|
||||
matrix:
|
||||
- MONGO_VERSION=4.1.10
|
||||
- MONGO_VERSION=4.0.4
|
||||
- MONGO_VERSION=3.6.12
|
||||
- MONGO_VERSION=3.4.20
|
||||
global:
|
||||
- PROFILE=ci
|
||||
global:
|
||||
- MONGO_VERSION=4.0.0
|
||||
|
||||
addons:
|
||||
apt:
|
||||
|
||||
281
Jenkinsfile
vendored
281
Jenkinsfile
vendored
@@ -1,184 +1,123 @@
|
||||
pipeline {
|
||||
agent none
|
||||
agent none
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/2.1.x", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
disableConcurrentBuilds()
|
||||
buildDiscarder(logRotator(numToKeepStr: '14'))
|
||||
}
|
||||
options {
|
||||
disableConcurrentBuilds()
|
||||
buildDiscarder(logRotator(numToKeepStr: '14'))
|
||||
}
|
||||
|
||||
stages {
|
||||
stage("Docker images") {
|
||||
parallel {
|
||||
stage('Publish JDK 8 + MongoDB 4.0') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.0/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
stages {
|
||||
stage("Test") {
|
||||
when {
|
||||
anyOf {
|
||||
branch '2.1.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
parallel {
|
||||
stage("test: baseline") {
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.1') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.1/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
}
|
||||
}
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.1", "ci/openjdk8-mongodb-4.1/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
branch 'issue/*'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
stage("test: baseline") {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.0:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -B'
|
||||
}
|
||||
}
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
parallel {
|
||||
stage("test: mongodb 4.1") {
|
||||
agent {
|
||||
docker {
|
||||
image 'springci/spring-data-openjdk8-with-mongodb-4.1:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Dsort -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.staging-repository=libs-snapshot-local " +
|
||||
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
|
||||
"-Dartifactory.build-number=${BUILD_NUMBER} " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
stage('Release to artifactory with docs') {
|
||||
when {
|
||||
branch '2.1.x'
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.staging-repository=libs-snapshot-local " +
|
||||
"-Dartifactory.build-name=spring-data-mongodb " +
|
||||
"-Dartifactory.build-number=${BUILD_NUMBER} " +
|
||||
'-Dmaven.test.skip=true clean deploy -B'
|
||||
}
|
||||
}
|
||||
steps {
|
||||
sh 'rm -rf ?'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.staging-repository=libs-snapshot-local " +
|
||||
"-Dartifactory.build-name=spring-data-mongodb-2.1 " +
|
||||
"-Dartifactory.build-number=${BUILD_NUMBER} " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Publish documentation') {
|
||||
when {
|
||||
branch 'master'
|
||||
}
|
||||
agent {
|
||||
docker {
|
||||
image 'adoptopenjdk/openjdk8:latest'
|
||||
label 'data'
|
||||
args '-v $HOME:/tmp/jenkins-home'
|
||||
}
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
|
||||
steps {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.distribution-repository=temp-private-local " +
|
||||
'-Dmaven.test.skip=true clean deploy -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
post {
|
||||
changed {
|
||||
script {
|
||||
slackSend(
|
||||
color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
|
||||
channel: '#spring-data-dev',
|
||||
message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
|
||||
emailext(
|
||||
subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
|
||||
mimeType: 'text/html',
|
||||
recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
|
||||
body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
changed {
|
||||
script {
|
||||
slackSend(
|
||||
color: (currentBuild.currentResult == 'SUCCESS') ? 'good' : 'danger',
|
||||
channel: '#spring-data-dev',
|
||||
message: "${currentBuild.fullDisplayName} - `${currentBuild.currentResult}`\n${env.BUILD_URL}")
|
||||
emailext(
|
||||
subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}",
|
||||
mimeType: 'text/html',
|
||||
recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']],
|
||||
body: "<a href=\"${env.BUILD_URL}\">${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}</a>")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly.
|
||||
@@ -1,39 +0,0 @@
|
||||
== Running CI tasks locally
|
||||
|
||||
Since Concourse is built on top of Docker, it's easy to:
|
||||
|
||||
* Debug what went wrong on your local machine.
|
||||
* Test out a a tweak to your `test.sh` script before sending it out.
|
||||
* Experiment against a new image before submitting your pull request.
|
||||
|
||||
All of these use cases are great reasons to essentially run what Concourse does on your local machine.
|
||||
|
||||
IMPORTANT: To do this you must have Docker installed on your machine.
|
||||
|
||||
1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-8-jdk-with-mongodb /bin/bash`
|
||||
+
|
||||
This will launch the Docker image and mount your source code at `spring-data-mongodb-github`.
|
||||
+
|
||||
Next, run the `test.sh` script from inside the container:
|
||||
+
|
||||
2. `PROFILE=none spring-data-mongodb-github/ci/test.sh`
|
||||
|
||||
Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs.
|
||||
|
||||
If you need to test the `build.sh` script, do this:
|
||||
|
||||
1. `mkdir /tmp/spring-data-mongodb-artifactory`
|
||||
2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-8-jdk-with-mongodb /bin/bash`
|
||||
+
|
||||
This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary
|
||||
artifactory output directory at `spring-data-mongodb-artifactory`.
|
||||
+
|
||||
Next, run the `build.sh` script from inside the container:
|
||||
+
|
||||
3. `spring-data-mongodb-github/ci/build.sh`
|
||||
|
||||
IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything.
|
||||
It just deploys to a local folder. That way, the `artifactory-resource` later in the pipeline can pick up these artifacts
|
||||
and deliver them to artifactory.
|
||||
|
||||
NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images.
|
||||
35
pom.xml
35
pom.xml
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.RC2</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,20 +15,21 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.2.0.RC2</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
<module>spring-data-mongodb</module>
|
||||
<module>spring-data-mongodb-cross-store</module>
|
||||
<module>spring-data-mongodb-distribution</module>
|
||||
</modules>
|
||||
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.2.0.RC2</springdata.commons>
|
||||
<mongo>3.11.0-rc0</mongo>
|
||||
<mongo.reactivestreams>1.12.0-rc0</mongo.reactivestreams>
|
||||
<springdata.commons>2.1.14.RELEASE</springdata.commons>
|
||||
<mongo>3.8.2</mongo>
|
||||
<mongo.reactivestreams>1.9.2</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
|
||||
@@ -117,10 +118,30 @@
|
||||
<id>benchmarks</id>
|
||||
<modules>
|
||||
<module>spring-data-mongodb</module>
|
||||
<module>spring-data-mongodb-cross-store</module>
|
||||
<module>spring-data-mongodb-distribution</module>
|
||||
<module>spring-data-mongodb-benchmarks</module>
|
||||
</modules>
|
||||
</profile>
|
||||
|
||||
<profile>
|
||||
<id>distribute</id>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
|
||||
</profiles>
|
||||
|
||||
<dependencies>
|
||||
@@ -134,8 +155,8 @@
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-libs-milestone</id>
|
||||
<url>https://repo.spring.io/libs-milestone</url>
|
||||
<id>spring-libs-release</id>
|
||||
<url>https://repo.spring.io/libs-release</url>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.RC2</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -87,7 +87,6 @@
|
||||
<plugin>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<useSystemClassLoader>false</useSystemClassLoader>
|
||||
<testSourceDirectory>${project.build.sourceDirectory}</testSourceDirectory>
|
||||
<testClassesDirectory>${project.build.outputDirectory}</testClassesDirectory>
|
||||
<excludes>
|
||||
|
||||
7
spring-data-mongodb-cross-store/aop.xml
Normal file
7
spring-data-mongodb-cross-store/aop.xml
Normal file
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<aspectj>
|
||||
<aspects>
|
||||
<aspect name="org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect" />
|
||||
<aspect name="org.springframework.data.mongodb.crossstore.MongoDocumentBacking" />
|
||||
</aspects>
|
||||
</aspectj>
|
||||
148
spring-data-mongodb-cross-store/pom.xml
Normal file
148
spring-data-mongodb-cross-store/pom.xml
Normal file
@@ -0,0 +1,148 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/maven-v4_0_0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
<artifactId>spring-data-mongodb-cross-store</artifactId>
|
||||
<name>Spring Data MongoDB - Cross-Store Support</name>
|
||||
|
||||
<properties>
|
||||
<jpa>2.1.1</jpa>
|
||||
<hibernate>5.2.1.Final</hibernate>
|
||||
<java-module-name>spring.data.mongodb.cross.store</java-module-name>
|
||||
<project.root>${basedir}/..</project.root>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<!-- Spring -->
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-beans</artifactId>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-tx</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-aspects</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-orm</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Spring Data -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.aspectj</groupId>
|
||||
<artifactId>aspectjrt</artifactId>
|
||||
<version>${aspectj}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- JPA -->
|
||||
<dependency>
|
||||
<groupId>org.eclipse.persistence</groupId>
|
||||
<artifactId>javax.persistence</artifactId>
|
||||
<version>${jpa}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- For Tests -->
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-entitymanager</artifactId>
|
||||
<version>${hibernate}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>hsqldb</groupId>
|
||||
<artifactId>hsqldb</artifactId>
|
||||
<version>1.8.0.10</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.validation</groupId>
|
||||
<artifactId>validation-api</artifactId>
|
||||
<version>${validation}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-validator</artifactId>
|
||||
<version>5.2.4.Final</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>aspectj-maven-plugin</artifactId>
|
||||
<version>1.6</version>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.aspectj</groupId>
|
||||
<artifactId>aspectjrt</artifactId>
|
||||
<version>${aspectj}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.aspectj</groupId>
|
||||
<artifactId>aspectjtools</artifactId>
|
||||
<version>${aspectj}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>compile</goal>
|
||||
<goal>test-compile</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
<configuration>
|
||||
<outxml>true</outxml>
|
||||
<aspectLibraries>
|
||||
<aspectLibrary>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-aspects</artifactId>
|
||||
</aspectLibrary>
|
||||
</aspectLibraries>
|
||||
<complianceLevel>${source.level}</complianceLevel>
|
||||
<source>${source.level}</source>
|
||||
<target>${source.level}</target>
|
||||
<xmlConfigured>aop.xml</xmlConfigured>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,15 +13,16 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository;
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import lombok.Value;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Value
|
||||
class SumAge {
|
||||
@Deprecated
|
||||
public interface DocumentBacked extends ChangeSetBacked {
|
||||
|
||||
private Long total;
|
||||
}
|
||||
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManagerFactory;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.mongodb.core.CollectionCallback;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.Filters;
|
||||
import com.mongodb.client.result.DeleteResult;
|
||||
|
||||
/**
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
* @author Alex Vengrovsk
|
||||
* @author Mark Paluch
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MongoChangeSetPersister implements ChangeSetPersister<Object> {
|
||||
|
||||
private static final String ENTITY_CLASS = "_entity_class";
|
||||
private static final String ENTITY_ID = "_entity_id";
|
||||
private static final String ENTITY_FIELD_NAME = "_entity_field_name";
|
||||
private static final String ENTITY_FIELD_CLASS = "_entity_field_class";
|
||||
|
||||
private final Logger log = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private MongoTemplate mongoTemplate;
|
||||
private EntityManagerFactory entityManagerFactory;
|
||||
|
||||
public void setMongoTemplate(MongoTemplate mongoTemplate) {
|
||||
this.mongoTemplate = mongoTemplate;
|
||||
}
|
||||
|
||||
public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
|
||||
this.entityManagerFactory = entityManagerFactory;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public void getPersistentState(Class<? extends ChangeSetBacked> entityClass, Object id, final ChangeSet changeSet)
|
||||
throws DataAccessException, NotFoundException {
|
||||
|
||||
if (id == null) {
|
||||
log.debug("Unable to load MongoDB data for null id");
|
||||
return;
|
||||
}
|
||||
|
||||
String collName = getCollectionNameForEntity(entityClass);
|
||||
|
||||
final Document dbk = new Document();
|
||||
dbk.put(ENTITY_ID, id);
|
||||
dbk.put(ENTITY_CLASS, entityClass.getName());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Loading MongoDB data for {}", dbk);
|
||||
}
|
||||
mongoTemplate.execute(collName, new CollectionCallback<Object>() {
|
||||
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
for (Document dbo : collection.find(dbk)) {
|
||||
String key = (String) dbo.get(ENTITY_FIELD_NAME);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Processing key: {}", key);
|
||||
}
|
||||
if (!changeSet.getValues().containsKey(key)) {
|
||||
String className = (String) dbo.get(ENTITY_FIELD_CLASS);
|
||||
if (className == null) {
|
||||
throw new DataIntegrityViolationException(
|
||||
"Unble to convert property " + key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available");
|
||||
}
|
||||
Class<?> clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader());
|
||||
Object value = mongoTemplate.getConverter().read(clazz, dbo);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Adding to ChangeSet: {}", key);
|
||||
}
|
||||
changeSet.set(key, value);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
|
||||
*/
|
||||
public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("getPersistentId called on {}", entity);
|
||||
}
|
||||
if (entityManagerFactory == null) {
|
||||
throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null");
|
||||
}
|
||||
|
||||
return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity);
|
||||
}
|
||||
|
||||
/*
 * Flushes the entity's ChangeSet to MongoDB: each non-internal key in the change set
 * is written as its own document in the entity's collection, keyed by entity id,
 * entity class and field name. A null value removes the corresponding document(s).
 *
 * (non-Javadoc)
 * @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet)
 */
public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException {
	if (cs == null) {
		log.debug("Flush: changeset was null, nothing to flush.");
		return 0L;
	}

	if (log.isDebugEnabled()) {
		log.debug("Flush: changeset: {}", cs.getValues());
	}

	String collName = getCollectionNameForEntity(entity.getClass());
	// Lazily create the backing collection on first flush for this entity type.
	if (mongoTemplate.getCollection(collName) == null) {
		mongoTemplate.createCollection(collName);
	}

	for (String key : cs.getValues().keySet()) {
		// Skip internal keys ("_"-prefixed) and the change-set id key itself.
		if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) {
			Object value = cs.getValues().get(key);
			// Query identifying the one document that stores this entity field.
			final Document dbQuery = new Document();
			dbQuery.put(ENTITY_ID, getPersistentId(entity, cs));
			dbQuery.put(ENTITY_CLASS, entity.getClass().getName());
			dbQuery.put(ENTITY_FIELD_NAME, key);
			// Look up the existing document (if any) so we can reuse its _id on update.
			final Document dbId = mongoTemplate.execute(collName, new CollectionCallback<Document>() {
				public Document doInCollection(MongoCollection<Document> collection)
						throws MongoException, DataAccessException {
					Document id = collection.find(dbQuery).first();
					return id;
				}
			});

			if (value == null) {
				// Null value in the change set means the field was cleared: delete its document(s).
				if (log.isDebugEnabled()) {
					log.debug("Flush: removing: {}", dbQuery);
				}
				mongoTemplate.execute(collName, new CollectionCallback<Object>() {
					public Object doInCollection(MongoCollection<Document> collection)
							throws MongoException, DataAccessException {
						DeleteResult dr = collection.deleteMany(dbQuery);
						return null;
					}
				});
			} else {
				// Build the document to store: query metadata + converted field value + value class.
				final Document dbDoc = new Document();
				dbDoc.putAll(dbQuery);
				if (log.isDebugEnabled()) {
					log.debug("Flush: saving: {}", dbQuery);
				}
				mongoTemplate.getConverter().write(value, dbDoc);
				// Record the concrete value type so reads can reconstruct the object.
				dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName());
				if (dbId != null) {
					// Preserve the existing document id so the update replaces in place.
					dbDoc.put("_id", dbId.get("_id"));
				}
				mongoTemplate.execute(collName, new CollectionCallback<Object>() {
					public Object doInCollection(MongoCollection<Document> collection)
							throws MongoException, DataAccessException {

						if (dbId != null) {
							collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
						} else {

							// Converter may have written an explicit null _id; drop it so
							// the driver generates one on insert.
							if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
								dbDoc.remove("_id");
							}
							collection.insertOne(dbDoc);
						}
						return null;
					}
				});
			}
		}
	}
	return 0L;
}
|
||||
|
||||
/**
 * Returns the collection the given entity type shall be persisted to.
 * Delegates to the configured {@link MongoTemplate}'s collection-name resolution.
 *
 * @param entityClass must not be {@literal null}.
 * @return the collection name for the given type.
 */
private String getCollectionNameForEntity(Class<? extends ChangeSetBacked> entityClass) {
	return mongoTemplate.getCollectionName(entityClass);
}
}
|
||||
@@ -0,0 +1,272 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Transient;
|
||||
import javax.persistence.Entity;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
import org.aspectj.lang.reflect.FieldSignature;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
import org.springframework.data.mongodb.crossstore.DocumentBacked;
|
||||
import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization;
|
||||
import org.springframework.data.crossstore.ChangeSet;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister;
|
||||
import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException;
|
||||
import org.springframework.data.crossstore.HashMapChangeSet;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
/**
 * Aspect to turn an object annotated with @Document into a persistent document using Mongo.
 * It introduces (via ITD) a backing {@link ChangeSet}, a {@link ChangeSetPersister} reference
 * and JPA lifecycle callbacks into every @Entity class, and routes reads/writes of
 * {@link RelatedDocument}-annotated fields through the change set instead of the plain field.
 *
 * @author Thomas Risberg
 * @deprecated will be removed without replacement.
 */
@Deprecated
public aspect MongoDocumentBacking {

	private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class);

	// Aspect shared config: persister used by all advised entities.
	private ChangeSetPersister<Object> changeSetPersister;

	public void setChangeSetPersister(ChangeSetPersister<Object> changeSetPersister) {
		this.changeSetPersister = changeSetPersister;
	}

	// ITD to introduce N state to Annotated objects: every @Entity becomes DocumentBacked.
	declare parents : (@Entity *) implements DocumentBacked;

	// The annotated fields that will be persisted in MongoDB rather than with JPA:
	// mark every @RelatedDocument field of an @Entity as JPA-@Transient.
	declare @field: @RelatedDocument * (@Entity+ *).*:@Transient;

	// -------------------------------------------------------------------------
	// Advise user-defined constructors of ChangeSetBacked objects to create a new
	// backing ChangeSet
	// -------------------------------------------------------------------------
	// Matches any constructor except the ChangeSet-taking one handled below.
	pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) :
		execution((DocumentBacked+).new(..)) &&
		!execution((DocumentBacked+).new(ChangeSet)) &&
		this(entity);

	pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) :
		execution((DocumentBacked+).new(ChangeSet)) &&
		this(entity) &&
		args(cs);

	// Field read of a @RelatedDocument field, excluding fields declared on DocumentBacked itself.
	protected pointcut entityFieldGet(DocumentBacked entity) :
		get(@RelatedDocument * DocumentBacked+.*) &&
		this(entity) &&
		!get(* DocumentBacked.*);

	// Field write of a @RelatedDocument field, excluding fields declared on DocumentBacked itself.
	protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) :
		set(@RelatedDocument * DocumentBacked+.*) &&
		this(entity) &&
		args(newVal) &&
		!set(* DocumentBacked.*);

	// intercept EntityManager.merge calls
	public pointcut entityManagerMerge(EntityManager em, Object entity) :
		call(* EntityManager.merge(Object)) &&
		target(em) &&
		args(entity);

	// intercept EntityManager.remove calls
	// public pointcut entityManagerRemove(EntityManager em, Object entity) :
	// call(* EntityManager.remove(Object)) &&
	// target(em) &&
	// args(entity);

	// move changeSet from detached entity to the newly merged persistent object
	Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) {
		Object mergedEntity = proceed(em, entity);
		if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) {
			((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet();
		}
		return mergedEntity;
	}

	// clear changeSet from removed entity
	// Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) {
	// if (entity instanceof DocumentBacked) {
	// removeChangeSetValues((DocumentBacked)entity);
	// }
	// return proceed(em, entity);
	// }

	// Replaces the entity's change set with one mapping every known key to null, so the
	// next flush deletes the corresponding MongoDB documents. Current persisted state is
	// loaded first so all stored keys are covered.
	private static void removeChangeSetValues(DocumentBacked entity) {
		LOGGER.debug("Removing all change-set values for " + entity);
		ChangeSet nulledCs = new HashMapChangeSet();
		DocumentBacked documentEntity = (DocumentBacked) entity;
		@SuppressWarnings("unchecked")
		ChangeSetPersister<Object> changeSetPersister = (ChangeSetPersister<Object>) documentEntity.itdChangeSetPersister;
		try {
			changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(),
					documentEntity.getChangeSet());
		} catch (DataAccessException e) {
			// NOTE(review): intentionally best-effort — failures to load state are ignored here.
		} catch (NotFoundException e) {
			// No persisted state: nothing extra to null out.
		}
		for (String key : entity.getChangeSet().getValues().keySet()) {
			nulledCs.set(key, null);
		}
		entity.setChangeSet(nulledCs);
	}

	// Initialize the ITD state on every user-constructed DocumentBacked instance.
	before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) {
		LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass());
		// Populate all ITD fields
		entity.setChangeSet(new HashMapChangeSet());
		entity.itdChangeSetPersister = changeSetPersister;
		entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity);
		// registerTransactionSynchronization(entity);
	}

	// Registers the entity's synchronization with the current transaction, once,
	// so the change set is flushed on commit; no-op outside a transaction.
	private static void registerTransactionSynchronization(DocumentBacked entity) {
		if (TransactionSynchronizationManager.isSynchronizationActive()) {
			if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) {
				if (LOGGER.isDebugEnabled()) {
					LOGGER.debug("Adding transaction synchronization for " + entity);
				}
				TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization);
			} else {
				if (LOGGER.isDebugEnabled()) {
					LOGGER.debug("Transaction synchronization already active for " + entity);
				}
			}
		} else {
			if (LOGGER.isDebugEnabled()) {
				LOGGER.debug("Transaction synchronization is not active for " + entity);
			}
		}
	}

	// -------------------------------------------------------------------------
	// ChangeSet-related mixins
	// -------------------------------------------------------------------------
	// Introduced field: the MongoDB-backed state of the entity.
	@Transient
	private ChangeSet DocumentBacked.changeSet;

	@Transient
	private ChangeSetPersister<?> DocumentBacked.itdChangeSetPersister;

	@Transient
	private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization;

	public void DocumentBacked.setChangeSet(ChangeSet cs) {
		this.changeSet = cs;
	}

	public ChangeSet DocumentBacked.getChangeSet() {
		return changeSet;
	}

	// Flush the entity state to the persistent store
	public void DocumentBacked.flush() {
		// NOTE(review): 'id' is computed but unused; persistState resolves the id itself.
		Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet);
		itdChangeSetPersister.persistState(this, this.changeSet);
	}

	public Object DocumentBacked.get_persistent_id() {
		return itdChangeSetPersister.getPersistentId(this, this.changeSet);
	}

	// lifecycle methods
	@javax.persistence.PostPersist
	public void DocumentBacked.itdPostPersist() {
		if (LOGGER.isDebugEnabled()) {
			// NOTE(review): message says "PrePersist" but this callback is @PostPersist.
			LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName());
		}
		registerTransactionSynchronization(this);
	}

	@javax.persistence.PreUpdate
	public void DocumentBacked.itdPreUpdate() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this);
		}
		registerTransactionSynchronization(this);
	}

	@javax.persistence.PostUpdate
	public void DocumentBacked.itdPostUpdate() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this);
		}
		registerTransactionSynchronization(this);
	}

	@javax.persistence.PostRemove
	public void DocumentBacked.itdPostRemove() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this);
		}
		registerTransactionSynchronization(this);
		// Removing the JPA entity also nulls out its MongoDB-backed values.
		removeChangeSetValues(this);
	}

	@javax.persistence.PostLoad
	public void DocumentBacked.itdPostLoad() {
		if (LOGGER.isDebugEnabled()) {
			LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this);
		}
		registerTransactionSynchronization(this);
	}

	/**
	 * delegates field reads to the state accessors instance:
	 * if the change set has no value yet, persistent state is loaded on demand;
	 * a non-null change-set value shadows the actual field.
	 */
	Object around(DocumentBacked entity): entityFieldGet(entity) {
		Field f = field(thisJoinPoint);
		String propName = f.getName();
		LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet());
		if (entity.getChangeSet().getValues().get(propName) == null) {
			try {
				this.changeSetPersister
						.getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet());
			} catch (NotFoundException e) {
				// Nothing persisted yet for this entity; fall through to the raw field value.
			}
		}
		Object fValue = entity.getChangeSet().getValues().get(propName);
		if (fValue != null) {
			return fValue;
		}
		return proceed(entity);
	}

	/**
	 * delegates field writes to the state accessors instance:
	 * records the new value in the change set and also writes the actual field.
	 */
	Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) {
		Field f = field(thisJoinPoint);
		String propName = f.getName();
		LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with value=[" + newVal + "]");
		entity.getChangeSet().set(propName, newVal);
		return proceed(entity, newVal);
	}

	// Extracts the java.lang.reflect.Field from a field get/set join point.
	Field field(JoinPoint joinPoint) {
		FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature();
		return fieldSignature.getField();
	}
}
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,24 +13,19 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.util.json;
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* A value provider to retrieve bindable values by their parameter index.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @author Thomas Risberg
|
||||
* @deprecated will be removed without replacement.
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ValueProvider {
|
||||
|
||||
/**
|
||||
* @param index parameter index to use.
|
||||
* @return can be {@literal null}.
|
||||
* @throws RuntimeException if the requested element does not exist.
|
||||
*/
|
||||
@Nullable
|
||||
Object getBindableValue(int index);
|
||||
/**
 * Marks a field of a JPA entity whose value is persisted as a MongoDB document by the
 * cross-store support instead of being mapped by JPA (the backing aspect declares such
 * fields {@code @Transient} and routes access through a {@code ChangeSet}).
 *
 * @deprecated will be removed without replacement.
 */
@Deprecated
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.FIELD })
public @interface RelatedDocument {
}
|
||||
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* Infrastructure for Spring Data's MongoDB cross store support.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
@@ -0,0 +1,195 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.PersistenceContext;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.mongodb.core.MongoTemplate;
|
||||
import org.springframework.data.mongodb.crossstore.test.Address;
|
||||
import org.springframework.data.mongodb.crossstore.test.Person;
|
||||
import org.springframework.data.mongodb.crossstore.test.Resume;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionCallback;
|
||||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
/**
|
||||
* Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}).
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml")
|
||||
public class CrossStoreMongoTests {
|
||||
|
||||
@Autowired MongoTemplate mongoTemplate;
|
||||
|
||||
@PersistenceContext EntityManager entityManager;
|
||||
|
||||
@Autowired PlatformTransactionManager transactionManager;
|
||||
TransactionTemplate txTemplate;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
|
||||
txTemplate = new TransactionTemplate(transactionManager);
|
||||
|
||||
clearData(Person.class);
|
||||
|
||||
Address address = new Address(12, "MAin St.", "Boston", "MA", "02101");
|
||||
|
||||
Resume resume = new Resume();
|
||||
resume.addEducation("Skanstulls High School, 1975");
|
||||
resume.addEducation("Univ. of Stockholm, 1980");
|
||||
resume.addJob("DiMark, DBA, 1990-2000");
|
||||
resume.addJob("VMware, Developer, 2007-");
|
||||
|
||||
final Person person = new Person("Thomas", 20);
|
||||
person.setAddress(address);
|
||||
person.setResume(resume);
|
||||
person.setId(1L);
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.persist(person);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
txTemplate.execute(new TransactionCallback<Void>() {
|
||||
public Void doInTransaction(TransactionStatus status) {
|
||||
entityManager.remove(entityManager.find(Person.class, 1L));
|
||||
return null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void clearData(Class<?> domainType) {
|
||||
|
||||
String collectionName = mongoTemplate.getCollectionName(domainType);
|
||||
mongoTemplate.dropCollection(collectionName);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testReadJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Transactional
|
||||
public void testUpdatedJpaToMongoEntityRelationship() {
|
||||
|
||||
Person found = entityManager.find(Person.class, 1L);
|
||||
found.setAge(44);
|
||||
found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006");
|
||||
|
||||
entityManager.merge(found);
|
||||
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found);
|
||||
Assert.assertEquals(Long.valueOf(1), found.getId());
|
||||
Assert.assertNotNull(found.getResume());
|
||||
Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; "
|
||||
+ "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMergeJpaEntityWithMongoDocument() {
|
||||
|
||||
final Person detached = entityManager.find(Person.class, 1L);
|
||||
entityManager.detach(detached);
|
||||
detached.getResume().addJob("TargetRx, Developer, 2000-2005");
|
||||
|
||||
Person merged = txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person result = entityManager.merge(detached);
|
||||
entityManager.flush();
|
||||
return result;
|
||||
}
|
||||
});
|
||||
|
||||
Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
final Person updated = entityManager.find(Person.class, 1L);
|
||||
Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemoveJpaEntityWithMongoDocument() {
|
||||
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
Person p2 = new Person("Thomas", 20);
|
||||
Resume r2 = new Resume();
|
||||
r2.addEducation("Skanstulls High School, 1975");
|
||||
r2.addJob("DiMark, DBA, 1990-2000");
|
||||
p2.setResume(r2);
|
||||
p2.setId(2L);
|
||||
entityManager.persist(p2);
|
||||
Person p3 = new Person("Thomas", 20);
|
||||
Resume r3 = new Resume();
|
||||
r3.addEducation("Univ. of Stockholm, 1980");
|
||||
r3.addJob("VMware, Developer, 2007-");
|
||||
p3.setResume(r3);
|
||||
p3.setId(3L);
|
||||
entityManager.persist(p3);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
txTemplate.execute(new TransactionCallback<Person>() {
|
||||
public Person doInTransaction(TransactionStatus status) {
|
||||
final Person found2 = entityManager.find(Person.class, 2L);
|
||||
entityManager.remove(found2);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
boolean weFound3 = false;
|
||||
|
||||
for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) {
|
||||
Assert.assertTrue(!dbo.get("_entity_id").equals(2L));
|
||||
if (dbo.get("_entity_id").equals(3L)) {
|
||||
weFound3 = true;
|
||||
}
|
||||
}
|
||||
Assert.assertTrue(weFound3);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
public class Address {
|
||||
|
||||
private Integer streetNumber;
|
||||
private String streetName;
|
||||
private String city;
|
||||
private String state;
|
||||
private String zip;
|
||||
|
||||
public Address(Integer streetNumber, String streetName, String city, String state, String zip) {
|
||||
super();
|
||||
this.streetNumber = streetNumber;
|
||||
this.streetName = streetName;
|
||||
this.city = city;
|
||||
this.state = state;
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
public Integer getStreetNumber() {
|
||||
return streetNumber;
|
||||
}
|
||||
|
||||
public void setStreetNumber(Integer streetNumber) {
|
||||
this.streetNumber = streetNumber;
|
||||
}
|
||||
|
||||
public String getStreetName() {
|
||||
return streetName;
|
||||
}
|
||||
|
||||
public void setStreetName(String streetName) {
|
||||
this.streetName = streetName;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
public void setState(String state) {
|
||||
this.state = state;
|
||||
}
|
||||
|
||||
public String getZip() {
|
||||
return zip;
|
||||
}
|
||||
|
||||
public void setZip(String zip) {
|
||||
this.zip = zip;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
|
||||
import org.springframework.data.mongodb.crossstore.RelatedDocument;
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
Long id;
|
||||
|
||||
private String name;
|
||||
|
||||
private int age;
|
||||
|
||||
private java.util.Date birthDate;
|
||||
|
||||
@RelatedDocument
|
||||
private Address address;
|
||||
|
||||
@RelatedDocument
|
||||
private Resume resume;
|
||||
|
||||
public Person() {
|
||||
}
|
||||
|
||||
public Person(String name, int age) {
|
||||
this.name = name;
|
||||
this.age = age;
|
||||
this.birthDate = new java.util.Date();
|
||||
}
|
||||
|
||||
public void birthday() {
|
||||
++age;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public int getAge() {
|
||||
return age;
|
||||
}
|
||||
|
||||
public void setAge(int age) {
|
||||
this.age = age;
|
||||
}
|
||||
|
||||
public java.util.Date getBirthDate() {
|
||||
return birthDate;
|
||||
}
|
||||
|
||||
public void setBirthDate(java.util.Date birthDate) {
|
||||
this.birthDate = birthDate;
|
||||
}
|
||||
|
||||
public Resume getResume() {
|
||||
return resume;
|
||||
}
|
||||
|
||||
public void setResume(Resume resume) {
|
||||
this.resume = resume;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Copyright 2011-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.crossstore.test;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
|
||||
@Document
|
||||
public class Resume {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(Resume.class);
|
||||
|
||||
@Id
|
||||
private ObjectId id;
|
||||
|
||||
private String education = "";
|
||||
|
||||
private String jobs = "";
|
||||
|
||||
public String getId() {
|
||||
return id.toString();
|
||||
}
|
||||
|
||||
public String getEducation() {
|
||||
return education;
|
||||
}
|
||||
|
||||
public void addEducation(String education) {
|
||||
LOGGER.debug("Adding education " + education);
|
||||
this.education = this.education + (this.education.length() > 0 ? "; " : "") + education;
|
||||
}
|
||||
|
||||
public String getJobs() {
|
||||
return jobs;
|
||||
}
|
||||
|
||||
public void addJob(String job) {
|
||||
LOGGER.debug("Adding job " + job);
|
||||
this.jobs = this.jobs + (this.jobs.length() > 0 ? "; " : "") + job;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Resume [education=" + education + ", jobs=" + jobs + "]";
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<persistence xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
version="2.0"
|
||||
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
|
||||
<persistence-unit name="test" transaction-type="RESOURCE_LOCAL">
|
||||
<provider>org.hibernate.ejb.HibernatePersistence</provider>
|
||||
<class>org.springframework.data.mongodb.crossstore.test.Person</class>
|
||||
<properties>
|
||||
<property name="hibernate.dialect" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
<!--value='create' to build a new database on each run; value='update' to modify an existing database; value='create-drop' means the same as 'create' but also drops tables when Hibernate closes; value='validate' makes no changes to the database-->
|
||||
<property name="hibernate.hbm2ddl.auto" value="update"/>
|
||||
<property name="hibernate.ejb.naming_strategy" value="org.hibernate.cfg.ImprovedNamingStrategy"/>
|
||||
</properties>
|
||||
</persistence-unit>
|
||||
</persistence>
|
||||
@@ -0,0 +1,72 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:tx="http://www.springframework.org/schema/tx"
|
||||
xmlns:jdbc="http://www.springframework.org/schema/jdbc"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xmlns:mongo="http://www.springframework.org/schema/data/mongo"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/data/mongo https://www.springframework.org/schema/data/mongo/spring-mongo.xsd
|
||||
http://www.springframework.org/schema/jdbc https://www.springframework.org/schema/jdbc/spring-jdbc-3.0.xsd
|
||||
http://www.springframework.org/schema/beans https://www.springframework.org/schema/beans/spring-beans-3.0.xsd
|
||||
http://www.springframework.org/schema/tx https://www.springframework.org/schema/tx/spring-tx-3.0.xsd
|
||||
http://www.springframework.org/schema/context https://www.springframework.org/schema/context/spring-context-3.0.xsd">
|
||||
|
||||
<context:spring-configured/>
|
||||
|
||||
<context:component-scan base-package="org.springframework.persistence.mongodb.test">
|
||||
<context:exclude-filter expression="org.springframework.stereotype.Controller" type="annotation"/>
|
||||
</context:component-scan>
|
||||
|
||||
<mongo:mapping-converter/>
|
||||
|
||||
<!-- Mongo config -->
|
||||
<bean id="mongoClient" class="org.springframework.data.mongodb.core.MongoClientFactoryBean">
|
||||
<property name="host" value="localhost"/>
|
||||
<property name="port" value="27017"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoDbFactory" class="org.springframework.data.mongodb.core.SimpleMongoDbFactory">
|
||||
<constructor-arg name="mongoClient" ref="mongoClient"/>
|
||||
<constructor-arg name="databaseName" value="database"/>
|
||||
</bean>
|
||||
|
||||
<bean id="mongoTemplate" class="org.springframework.data.mongodb.core.MongoTemplate">
|
||||
<constructor-arg name="mongoDbFactory" ref="mongoDbFactory"/>
|
||||
<constructor-arg name="mongoConverter" ref="mappingConverter"/>
|
||||
</bean>
|
||||
|
||||
<bean class="org.springframework.data.mongodb.core.MongoExceptionTranslator"/>
|
||||
|
||||
<!-- Mongo aspect config -->
|
||||
<bean class="org.springframework.data.mongodb.crossstore.MongoDocumentBacking"
|
||||
factory-method="aspectOf">
|
||||
<property name="changeSetPersister" ref="mongoChangeSetPersister"/>
|
||||
</bean>
|
||||
<bean id="mongoChangeSetPersister"
|
||||
class="org.springframework.data.mongodb.crossstore.MongoChangeSetPersister">
|
||||
<property name="mongoTemplate" ref="mongoTemplate"/>
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<jdbc:embedded-database id="dataSource" type="HSQL">
|
||||
</jdbc:embedded-database>
|
||||
|
||||
<bean id="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager">
|
||||
<property name="entityManagerFactory" ref="entityManagerFactory"/>
|
||||
</bean>
|
||||
|
||||
<tx:annotation-driven mode="aspectj" transaction-manager="transactionManager"/>
|
||||
|
||||
<bean class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean" id="entityManagerFactory">
|
||||
<property name="persistenceUnitName" value="test"/>
|
||||
<property name="dataSource" ref="dataSource"/>
|
||||
<property name="jpaVendorAdapter">
|
||||
<bean class="org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter">
|
||||
<property name="showSql" value="true"/>
|
||||
<property name="generateDdl" value="true"/>
|
||||
<property name="databasePlatform" value="org.hibernate.dialect.HSQLDialect"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
</beans>
|
||||
@@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration>
|
||||
|
||||
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder>
|
||||
<pattern>%d %5p %40.40c:%4L - %m%n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<!--
|
||||
<logger name="org.springframework" level="debug" />
|
||||
-->
|
||||
|
||||
<root level="error">
|
||||
<appender-ref ref="console" />
|
||||
</root>
|
||||
|
||||
</configuration>
|
||||
@@ -1,6 +1,5 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
@@ -14,7 +13,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.RC2</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -32,15 +31,8 @@
|
||||
<plugin>
|
||||
<groupId>org.asciidoctor</groupId>
|
||||
<artifactId>asciidoctor-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<attributes>
|
||||
<mongo-reactivestreams>${mongo.reactivestreams}</mongo-reactivestreams>
|
||||
<reactor>${reactor}</reactor>
|
||||
</attributes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
</build>
|
||||
|
||||
</project>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>2.2.0.RC2</version>
|
||||
<version>2.1.14.RELEASE</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -83,14 +83,14 @@
|
||||
|
||||
<!-- reactive -->
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-reactivestreams</artifactId>
|
||||
<version>${mongo.reactivestreams}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<groupId>org.mongodb</groupId>
|
||||
<artifactId>mongodb-driver-async</artifactId>
|
||||
<version>${mongo}</version>
|
||||
<optional>true</optional>
|
||||
@@ -107,7 +107,7 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-core</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
@@ -119,14 +119,14 @@
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava</artifactId>
|
||||
<version>${rxjava}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<groupId>io.reactivex</groupId>
|
||||
<artifactId>rxjava-reactive-streams</artifactId>
|
||||
<version>${rxjava-reactive-streams}</version>
|
||||
<optional>true</optional>
|
||||
@@ -264,27 +264,20 @@
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-stdlib</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-reflect</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-core</artifactId>
|
||||
<version>${kotlin-coroutines}</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jetbrains.kotlinx</groupId>
|
||||
<artifactId>kotlinx-coroutines-reactor</artifactId>
|
||||
<version>${kotlin-coroutines}</version>
|
||||
<optional>true</optional>
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -329,7 +322,6 @@
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<useSystemClassLoader>false</useSystemClassLoader>
|
||||
<useFile>false</useFile>
|
||||
<includes>
|
||||
<include>**/*Tests.java</include>
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright 2010-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.data.authentication.UserCredentials;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Exception being thrown in case we cannot connect to a MongoDB instance.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException {
|
||||
|
||||
private final UserCredentials credentials;
|
||||
private final @Nullable String database;
|
||||
|
||||
private static final long serialVersionUID = 1172099106475265589L;
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, Throwable cause) {
|
||||
super(msg, cause);
|
||||
this.database = null;
|
||||
this.credentials = UserCredentials.NO_CREDENTIALS;
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg) {
|
||||
this(msg, null, UserCredentials.NO_CREDENTIALS);
|
||||
}
|
||||
|
||||
public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) {
|
||||
super(msg);
|
||||
this.database = database;
|
||||
this.credentials = credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public UserCredentials getCredentials() {
|
||||
return this.credentials;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the database trying to be accessed.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabase() {
|
||||
return database;
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
@@ -87,16 +88,4 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider {
|
||||
* @since 2.1
|
||||
*/
|
||||
ReactiveMongoDatabaseFactory withSession(ClientSession session);
|
||||
|
||||
/**
|
||||
* Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @return {@literal true} if there's an active transaction, {@literal false} otherwise.
|
||||
* @since 2.2
|
||||
*/
|
||||
default boolean isTransactionActive() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,278 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.NoTransactionException;
|
||||
import org.springframework.transaction.reactive.ReactiveResourceSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronization;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.ResourceHolderSynchronization;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a
|
||||
* {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the
|
||||
* {@link ReactiveMongoDatabaseFactory resource} and if the associated
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} has an
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}.
|
||||
*
|
||||
* @param databaseFactory the resource to check transactions for. Must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction.
|
||||
*/
|
||||
public static Mono<Boolean> isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
if (databaseFactory.isTransactionActive()) {
|
||||
return Mono.just(true);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction() //
|
||||
.map(it -> {
|
||||
|
||||
ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory);
|
||||
return holder != null && holder.hasActiveTransaction();
|
||||
}) //
|
||||
.onErrorResume(NoTransactionException.class, e -> Mono.just(false));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(null, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory) {
|
||||
return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
* @param dbName the name of the {@link MongoDatabase} to get.
|
||||
* @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from.
|
||||
* @param sessionSynchronization the synchronization to use. Must not be {@literal null}.
|
||||
* @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}.
|
||||
*/
|
||||
public static Mono<MongoDatabase> getDatabase(String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
return doGetMongoDatabase(dbName, factory, sessionSynchronization);
|
||||
}
|
||||
|
||||
private static Mono<MongoDatabase> doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory,
|
||||
SessionSynchronization sessionSynchronization) {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
return doGetSession(synchronizationManager, factory, sessionSynchronization) //
|
||||
.map(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it)));
|
||||
})
|
||||
.onErrorResume(NoTransactionException.class,
|
||||
e -> Mono.fromSupplier(() -> getMongoDatabaseOrDefault(dbName, factory)))
|
||||
.defaultIfEmpty(getMongoDatabaseOrDefault(dbName, factory));
|
||||
}
|
||||
|
||||
private static MongoDatabase getMongoDatabaseOrDefault(@Nullable String dbName,
|
||||
ReactiveMongoDatabaseFactory factory) {
|
||||
return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> doGetSession(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) {
|
||||
|
||||
final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(dbFactory);
|
||||
|
||||
// check for native MongoDB transaction
|
||||
if (registeredHolder != null
|
||||
&& (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) {
|
||||
|
||||
return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession())
|
||||
: createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent);
|
||||
}
|
||||
|
||||
if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) {
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
// init a non native MongoDB transaction by registering a MongoSessionSynchronization
|
||||
return createClientSession(dbFactory).map(session -> {
|
||||
|
||||
ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory);
|
||||
newHolder.getRequiredSession().startTransaction();
|
||||
|
||||
synchronizationManager
|
||||
.registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory));
|
||||
newHolder.setSynchronizedWithTransaction(true);
|
||||
synchronizationManager.bindResource(dbFactory, newHolder);
|
||||
|
||||
return newHolder.getSession();
|
||||
});
|
||||
}
|
||||
|
||||
private static Mono<ClientSession> createClientSession(ReactiveMongoDatabaseFactory dbFactory) {
|
||||
return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when
|
||||
* participating in a non-native MongoDB transaction, such as a R2CBC transaction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
private static class MongoSessionSynchronization
|
||||
extends ReactiveResourceSynchronization<ReactiveMongoResourceHolder, Object> {
|
||||
|
||||
private final ReactiveMongoResourceHolder resourceHolder;
|
||||
|
||||
MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager,
|
||||
ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) {
|
||||
|
||||
super(resourceHolder, dbFactory, synchronizationManager);
|
||||
this.resourceHolder = resourceHolder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#shouldReleaseBeforeCompletion()
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldReleaseBeforeCompletion() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#processResourceAfterCommit(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (isTransactionActive(resourceHolder)) {
|
||||
return Mono.from(resourceHolder.getRequiredSession().commitTransaction());
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#afterCompletion(int)
|
||||
*/
|
||||
@Override
|
||||
public Mono<Void> afterCompletion(int status) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) {
|
||||
|
||||
return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) //
|
||||
.then(super.afterCompletion(status));
|
||||
}
|
||||
|
||||
return super.afterCompletion(status);
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.ReactiveResourceSynchronization#releaseResource(java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
if (resourceHolder.hasActiveSession()) {
|
||||
resourceHolder.getRequiredSession().close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) {
|
||||
|
||||
if (!resourceHolder.hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return resourceHolder.getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,155 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.support.ResourceHolderSupport;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see ReactiveMongoTransactionManager
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
class ReactiveMongoResourceHolder extends ResourceHolderSupport {
|
||||
|
||||
private @Nullable ClientSession session;
|
||||
private ReactiveMongoDatabaseFactory databaseFactory;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}.
|
||||
*
|
||||
* @param session the associated {@link ClientSession}. Can be {@literal null}.
|
||||
* @param databaseFactory the associated {@link MongoDbFactory}. must not be {@literal null}.
|
||||
*/
|
||||
ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) {
|
||||
|
||||
this.session = session;
|
||||
this.databaseFactory = databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ClientSession}. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
ClientSession getSession() {
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the required associated {@link ClientSession}.
|
||||
* @throws IllegalStateException if no session is associated.
|
||||
*/
|
||||
ClientSession getRequiredSession() {
|
||||
|
||||
ClientSession session = getSession();
|
||||
|
||||
if (session == null) {
|
||||
throw new IllegalStateException("No ClientSession associated");
|
||||
}
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the associated {@link ReactiveMongoDatabaseFactory}.
|
||||
*/
|
||||
public ReactiveMongoDatabaseFactory getDatabaseFactory() {
|
||||
return databaseFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ClientSession} to guard.
|
||||
*
|
||||
* @param session can be {@literal null}.
|
||||
*/
|
||||
public void setSession(@Nullable ClientSession session) {
|
||||
this.session = session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if session is not {@literal null}.
|
||||
*/
|
||||
boolean hasSession() {
|
||||
return session != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a
|
||||
* {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current
|
||||
* bound session is returned.
|
||||
*
|
||||
* @param session
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public ClientSession setSessionIfAbsent(@Nullable ClientSession session) {
|
||||
|
||||
if (!hasSession()) {
|
||||
setSession(session);
|
||||
}
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session is active and has not been closed.
|
||||
*/
|
||||
boolean hasActiveSession() {
|
||||
|
||||
if (!hasSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return hasServerSession() && !getRequiredSession().getServerSession().isClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the session has an active transaction.
|
||||
* @see #hasActiveSession()
|
||||
*/
|
||||
boolean hasActiveTransaction() {
|
||||
|
||||
if (!hasActiveSession()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return getRequiredSession().hasActiveTransaction();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated
|
||||
* that is accessible via {@link ClientSession#getServerSession()}.
|
||||
*/
|
||||
boolean hasServerSession() {
|
||||
|
||||
try {
|
||||
return getRequiredSession().getServerSession() != null;
|
||||
} catch (IllegalStateException serverSessionClosed) {
|
||||
// ignore
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -1,530 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.TransactionDefinition;
|
||||
import org.springframework.transaction.TransactionException;
|
||||
import org.springframework.transaction.TransactionSystemException;
|
||||
import org.springframework.transaction.reactive.AbstractReactiveTransactionManager;
|
||||
import org.springframework.transaction.reactive.GenericReactiveTransaction;
|
||||
import org.springframework.transaction.reactive.TransactionSynchronizationManager;
|
||||
import org.springframework.transaction.support.SmartTransactionObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.TransactionOptions;
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
* behavior as outlined in the MongoDB reference manual.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
* @see <a href="https://www.mongodb.com/transactions">MongoDB Transaction Documentation</a>
|
||||
* @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
|
||||
*/
|
||||
public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean {
|
||||
|
||||
private @Nullable ReactiveMongoDatabaseFactory databaseFactory;
|
||||
private @Nullable TransactionOptions options;
|
||||
|
||||
	/**
	 * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
	 * <p />
	 * <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
	 * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor
	 * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
	 * <p />
	 * Optionally it is possible to set default {@link TransactionOptions transaction options} defining
	 * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
	 *
	 * @see #setDatabaseFactory(ReactiveMongoDatabaseFactory)
	 */
	public ReactiveMongoTransactionManager() {}
|
||||
|
||||
	/**
	 * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
	 * {@link ReactiveMongoDatabaseFactory}.
	 * <p />
	 * Delegates to the two-argument constructor without default {@link TransactionOptions}.
	 *
	 * @param databaseFactory must not be {@literal null}.
	 */
	public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
		this(databaseFactory, null);
	}
|
||||
|
||||
	/**
	 * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given
	 * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when
	 * starting a new transaction.
	 *
	 * @param databaseFactory must not be {@literal null}.
	 * @param options can be {@literal null}, in which case the driver defaults apply when starting transactions.
	 */
	public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory,
			@Nullable TransactionOptions options) {

		Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");

		this.databaseFactory = databaseFactory;
		this.options = options;
	}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doGetTransaction(org.springframework.transaction.reactive.TransactionSynchronizationManager)
|
||||
*/
|
||||
@Override
|
||||
protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager)
|
||||
throws TransactionException {
|
||||
|
||||
ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager
|
||||
.getResource(getRequiredDatabaseFactory());
|
||||
return new ReactiveMongoTransactionObject(resourceHolder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#isExistingTransaction(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected boolean isExistingTransaction(Object transaction) throws TransactionException {
|
||||
return extractMongoTransaction(transaction).hasResourceHolder();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doBegin(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, org.springframework.transaction.TransactionDefinition)
|
||||
*/
|
||||
	@Override
	protected Mono<Void> doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction,
			TransactionDefinition definition) throws TransactionException {

		// Defer so session acquisition happens on subscription rather than at assembly time.
		return Mono.defer(() -> {

			ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);

			// Obtain a new causally consistent ClientSession wrapped in a resource holder.
			Mono<ReactiveMongoResourceHolder> holder = newResourceHolder(definition,
					ClientSessionOptions.builder().causallyConsistent(true).build());

			return holder.doOnNext(resourceHolder -> {

				// Associate the freshly created holder with the transaction object first ...
				mongoTransactionObject.setResourceHolder(resourceHolder);

				if (logger.isDebugEnabled()) {
					logger.debug(
							String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession())));
				}

			}).doOnNext(resourceHolder -> {

				// ... then start the MongoDB transaction, applying configured TransactionOptions (may be null).
				mongoTransactionObject.startTransaction(options);

				if (logger.isDebugEnabled()) {
					logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession())));
				}

			})//
					.onErrorMap(
							ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.",
									debugString(mongoTransactionObject.getSession())), ex))
					.doOnSuccess(resourceHolder -> {

						// Bind the holder so ReactiveMongoDatabaseUtils can participate in this transaction.
						synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder);
					}).then();
		});
	}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSuspend(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Object> doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction)
|
||||
throws TransactionException {
|
||||
|
||||
return Mono.fromSupplier(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction);
|
||||
mongoTransactionObject.setResourceHolder(null);
|
||||
|
||||
return synchronizationManager.unbindResource(getRequiredDatabaseFactory());
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doResume(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object, java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction,
|
||||
Object suspendedResources) {
|
||||
return Mono
|
||||
.fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCommit(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected final Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug(String.format("About to commit transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())));
|
||||
}
|
||||
|
||||
return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> {
|
||||
return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.",
|
||||
debugString(mongoTransactionObject.getSession())), ex);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
	/**
	 * Customization hook to perform an actual commit of the given transaction.<br />
	 * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding
	 * {@literal error labels}. <br />
	 * By default those labels are ignored, nevertheless one might check for
	 * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the
	 * commit.
	 *
	 * @param synchronizationManager reactive synchronization manager.
	 * @param transactionObject never {@literal null}.
	 * @return a {@link Mono} completing when the commit has finished.
	 */
	protected Mono<Void> doCommit(TransactionSynchronizationManager synchronizationManager,
			ReactiveMongoTransactionObject transactionObject) {
		return transactionObject.commitTransaction();
	}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doRollback(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
	@Override
	protected Mono<Void> doRollback(TransactionSynchronizationManager synchronizationManager,
			GenericReactiveTransaction status) {

		return Mono.defer(() -> {

			ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status);

			if (logger.isDebugEnabled()) {
				logger.debug(String.format("About to abort transaction for session %s.",
						debugString(mongoTransactionObject.getSession())));
			}

			// Translate driver-level MongoExceptions raised during abort into Spring's TransactionSystemException.
			return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> {
				return Mono
						.error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.",
								debugString(mongoTransactionObject.getSession())), ex));
			});
		});
	}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doSetRollbackOnly(org.springframework.transaction.reactive.TransactionSynchronizationManager, org.springframework.transaction.reactive.GenericReactiveTransaction)
|
||||
*/
|
||||
@Override
|
||||
protected Mono<Void> doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager,
|
||||
GenericReactiveTransaction status) throws TransactionException {
|
||||
|
||||
return Mono.fromRunnable(() -> {
|
||||
ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status);
|
||||
transactionObject.getRequiredResourceHolder().setRollbackOnly();
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.transaction.reactive.AbstractReactiveTransactionManager#doCleanupAfterCompletion(org.springframework.transaction.reactive.TransactionSynchronizationManager, java.lang.Object)
|
||||
*/
|
||||
	@Override
	protected Mono<Void> doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager,
			Object transaction) {

		Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
				() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
						transaction.getClass()));

		return Mono.fromRunnable(() -> {
			ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction;

			// Remove the connection holder from the thread.
			synchronizationManager.unbindResource(getRequiredDatabaseFactory());
			mongoTransactionObject.getRequiredResourceHolder().clear();

			if (logger.isDebugEnabled()) {
				logger.debug(String.format("About to release Session %s after transaction.",
						debugString(mongoTransactionObject.getSession())));
			}

			// Close the ClientSession regardless of commit/rollback outcome.
			mongoTransactionObject.closeSession();
		});
	}
|
||||
|
||||
	/**
	 * Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for.
	 *
	 * @param databaseFactory must not be {@literal null}.
	 */
	public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) {

		Assert.notNull(databaseFactory, "DatabaseFactory must not be null!");
		this.databaseFactory = databaseFactory;
	}
|
||||
|
||||
	/**
	 * Set the {@link TransactionOptions} to be applied when starting transactions.
	 *
	 * @param options can be {@literal null}, in which case driver defaults apply.
	 */
	public void setOptions(@Nullable TransactionOptions options) {
		this.options = options;
	}
|
||||
|
||||
	/**
	 * Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for.
	 *
	 * @return can be {@literal null} if not yet configured.
	 */
	@Nullable
	public ReactiveMongoDatabaseFactory getDatabaseFactory() {
		return databaseFactory;
	}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet()
|
||||
*/
|
||||
	@Override
	public void afterPropertiesSet() {
		// Fail fast during container startup if no ReactiveMongoDatabaseFactory was configured.
		getRequiredDatabaseFactory();
	}
|
||||
|
||||
private Mono<ReactiveMongoResourceHolder> newResourceHolder(TransactionDefinition definition,
|
||||
ClientSessionOptions options) {
|
||||
|
||||
ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory();
|
||||
|
||||
return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory));
|
||||
}
|
||||
|
||||
	/**
	 * Returns the configured {@link ReactiveMongoDatabaseFactory}.
	 *
	 * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}.
	 */
	private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() {

		Assert.state(databaseFactory != null,
				"ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory. Did you forget to provide one? It's required.");

		return databaseFactory;
	}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction,
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
transaction.getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) transaction;
|
||||
}
|
||||
|
||||
private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) {
|
||||
|
||||
Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(),
|
||||
() -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class,
|
||||
status.getTransaction().getClass()));
|
||||
|
||||
return (ReactiveMongoTransactionObject) status.getTransaction();
|
||||
}
|
||||
|
||||
private static String debugString(@Nullable ClientSession session) {
|
||||
|
||||
if (session == null) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()),
|
||||
Integer.toHexString(session.hashCode()));
|
||||
|
||||
try {
|
||||
if (session.getServerSession() != null) {
|
||||
debugString += String.format("id = %s, ", session.getServerSession().getIdentifier());
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber());
|
||||
debugString += String.format("closed = %d, ", session.getServerSession().isClosed());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
} else {
|
||||
debugString += "id = n/a";
|
||||
debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent());
|
||||
debugString += String.format("txActive = %s, ", session.hasActiveTransaction());
|
||||
debugString += String.format("clusterTime = %s", session.getClusterTime());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
debugString += String.format("error = %s", e.getMessage());
|
||||
}
|
||||
|
||||
debugString += "]";
|
||||
|
||||
return debugString;
|
||||
}
|
||||
|
||||
	/**
	 * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by
	 * {@link ReactiveMongoTransactionManager}.
	 *
	 * @author Christoph Strobl
	 * @author Mark Paluch
	 * @since 2.2
	 * @see ReactiveMongoResourceHolder
	 */
	protected static class ReactiveMongoTransactionObject implements SmartTransactionObject {

		// Holder for the ClientSession participating in this transaction; null before doBegin / after suspend.
		private @Nullable ReactiveMongoResourceHolder resourceHolder;

		ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) {
			this.resourceHolder = resourceHolder;
		}

		/**
		 * Set the {@link MongoResourceHolder}.
		 *
		 * @param resourceHolder can be {@literal null}.
		 */
		void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) {
			this.resourceHolder = resourceHolder;
		}

		/**
		 * @return {@literal true} if a {@link MongoResourceHolder} is set.
		 */
		final boolean hasResourceHolder() {
			return resourceHolder != null;
		}

		/**
		 * Start a MongoDB transaction optionally given {@link TransactionOptions}.
		 *
		 * @param options can be {@literal null}, in which case the driver's default options apply.
		 */
		void startTransaction(@Nullable TransactionOptions options) {

			ClientSession session = getRequiredSession();
			if (options != null) {
				session.startTransaction(options);
			} else {
				session.startTransaction();
			}
		}

		/**
		 * Commit the transaction.
		 */
		public Mono<Void> commitTransaction() {
			return Mono.from(getRequiredSession().commitTransaction());
		}

		/**
		 * Rollback (abort) the transaction.
		 */
		public Mono<Void> abortTransaction() {
			return Mono.from(getRequiredSession().abortTransaction());
		}

		/**
		 * Close a {@link ClientSession} without regard to its transactional state.
		 */
		void closeSession() {

			ClientSession session = getRequiredSession();
			// Only close if a server session exists and has not been closed already.
			if (session.getServerSession() != null && !session.getServerSession().isClosed()) {
				session.close();
			}
		}

		/**
		 * @return the session held by the resource holder, or {@literal null} if no holder is set.
		 */
		@Nullable
		public ClientSession getSession() {
			return resourceHolder != null ? resourceHolder.getSession() : null;
		}

		private ReactiveMongoResourceHolder getRequiredResourceHolder() {

			Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present. o_O");
			return resourceHolder;
		}

		private ClientSession getRequiredSession() {

			ClientSession session = getSession();
			Assert.state(session != null, "A Session is required but it turned out to be null.");
			return session;
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly()
		 */
		@Override
		public boolean isRollbackOnly() {
			return this.resourceHolder != null && this.resourceHolder.isRollbackOnly();
		}

		/*
		 * (non-Javadoc)
		 * @see org.springframework.transaction.support.SmartTransactionObject#flush()
		 */
		@Override
		public void flush() {
			// Flushing has no meaning for MongoDB client sessions.
			throw new UnsupportedOperationException("flush() not supported");
		}
	}
|
||||
}
|
||||
@@ -51,6 +51,7 @@ import org.springframework.core.type.filter.AssignableTypeFilter;
|
||||
import org.springframework.core.type.filter.TypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.config.BeanComponentDefinitionBuilder;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
@@ -101,6 +102,8 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext);
|
||||
String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id);
|
||||
|
||||
createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element);
|
||||
|
||||
// Need a reference to a Mongo instance
|
||||
String dbFactoryRef = element.getAttribute("db-factory-ref");
|
||||
if (!StringUtils.hasText(dbFactoryRef)) {
|
||||
@@ -345,6 +348,20 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
return null;
|
||||
}
|
||||
|
||||
	/**
	 * Registers a {@link MappingContextIsNewStrategyFactory} bean definition backed by the given mapping context
	 * reference and returns the well-known bean name it is registered under.
	 *
	 * @param mappingContextRef bean name of the {@code MappingContext} to wire as constructor argument.
	 * @param context parser context used for bean registration.
	 * @param element source XML element (used for component metadata).
	 * @return the registered bean name ({@code IS_NEW_STRATEGY_FACTORY_BEAN_NAME}).
	 */
	public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context,
			Element element) {

		BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder
				.rootBeanDefinition(MappingContextIsNewStrategyFactory.class);
		mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef);

		BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context);
		context.registerBeanComponent(
				builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME));

		return IS_NEW_STRATEGY_FACTORY_BEAN_NAME;
	}
|
||||
|
||||
/**
|
||||
* {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches.
|
||||
*
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.config;
|
||||
import static org.springframework.data.config.ParsingUtils.*;
|
||||
import static org.springframework.data.mongodb.config.BeanNames.*;
|
||||
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.beans.factory.support.RootBeanDefinition;
|
||||
@@ -27,33 +26,25 @@ import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.beans.factory.xml.ParserContext;
|
||||
import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information
|
||||
* on an entity.
|
||||
* {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on
|
||||
* an entity.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element)
|
||||
*/
|
||||
@Override
|
||||
protected Class<?> getBeanClass(Element element) {
|
||||
return AuditingEntityCallback.class;
|
||||
return AuditingEventListener.class;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -89,24 +80,7 @@ public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinit
|
||||
mappingContextRef);
|
||||
parser.parse(element, parserContext);
|
||||
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element));
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler,
|
||||
parserContext.extractSource(element));
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry,
|
||||
AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(isNewAwareAuditingHandler);
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition());
|
||||
builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(),
|
||||
parserContext.extractSource(element)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,23 +32,17 @@ import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -110,27 +104,12 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder
|
||||
.rootBeanDefinition(AuditingEntityCallback.class);
|
||||
.rootBeanDefinition(AuditingEventListener.class);
|
||||
listenerBeanDefinitionBuilder
|
||||
.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
AuditingEventListener.class.getName(), registry);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
@@ -27,12 +28,17 @@ import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory;
|
||||
import org.springframework.data.mapping.context.PersistentEntities;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.support.CachingIsNewStrategyFactory;
|
||||
import org.springframework.data.support.IsNewStrategyFactory;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -81,11 +87,23 @@ public abstract class MongoConfigurationSupport {
|
||||
mappingContext.setInitialEntitySet(getInitialEntitySet());
|
||||
mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder());
|
||||
mappingContext.setFieldNamingStrategy(fieldNamingStrategy());
|
||||
mappingContext.setAutoIndexCreation(autoIndexCreation());
|
||||
|
||||
return mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}.
|
||||
*
|
||||
* @return
|
||||
* @throws ClassNotFoundException
|
||||
*/
|
||||
@Bean
|
||||
public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException {
|
||||
|
||||
return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory(
|
||||
new PersistentEntities(Arrays.<MappingContext<?, ?>> asList(new MappingContext[] { mongoMappingContext() }))));
|
||||
}
|
||||
|
||||
/**
|
||||
* Register custom {@link Converter}s in a {@link CustomConversions} object if required. These
|
||||
* {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and
|
||||
@@ -172,16 +190,4 @@ public abstract class MongoConfigurationSupport {
|
||||
return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy()
|
||||
: PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure whether to automatically create indices for domain types by deriving the
|
||||
* {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not.
|
||||
*
|
||||
* @return {@literal true} by default. <br />
|
||||
* <strong>INFO</strong>: As of 3.x the default will be set to {@literal false}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected boolean autoIndexCreation() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.beans.PropertyEditorSupport;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
@@ -27,7 +26,6 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoCredential;
|
||||
@@ -80,23 +78,12 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0]));
|
||||
} else if ("MONGODB-CR".equals(authMechanism)) {
|
||||
} else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUsernameAndPasswordPresent(userNameAndPassword);
|
||||
verifyDatabasePresent(database);
|
||||
|
||||
Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class,
|
||||
"createMongoCRCredential", String.class, String.class, char[].class);
|
||||
|
||||
if (createCRCredentialMethod == null) {
|
||||
throw new IllegalArgumentException("MONGODB-CR is no longer supported.");
|
||||
}
|
||||
|
||||
MongoCredential credential = MongoCredential.class
|
||||
.cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
credentials.add(credential);
|
||||
|
||||
credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database,
|
||||
userNameAndPassword[1].toCharArray()));
|
||||
} else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) {
|
||||
|
||||
verifyUserNamePresent(userNameAndPassword);
|
||||
|
||||
@@ -35,7 +35,6 @@ import org.w3c.dom.Element;
|
||||
* @author Oliver Gierke
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class MongoParsingUtils {
|
||||
@@ -93,7 +92,6 @@ abstract class MongoParsingUtils {
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory");
|
||||
setPropertyReference(clientOptionsDefBuilder, optionsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout");
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
@@ -31,7 +31,6 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
*
|
||||
* @author Tobias Trelle
|
||||
* @author Oliver Gierke
|
||||
* @author Minsu Kim
|
||||
* @since 1.9
|
||||
*/
|
||||
public interface BulkOperations {
|
||||
@@ -136,29 +135,6 @@ public interface BulkOperations {
|
||||
*/
|
||||
BulkOperations remove(List<Query> removes);
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default BulkOperations replaceOne(Query query, Object replacement) {
|
||||
return replaceOne(query, replacement, FindAndReplaceOptions.empty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a single replace operation to the bulk operation.
|
||||
*
|
||||
* @param query Update criteria.
|
||||
* @param replacement the replacement document. Must not be {@literal null}.
|
||||
* @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}.
|
||||
* @return the current {@link BulkOperations} instance with the replace added, will never be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options);
|
||||
|
||||
/**
|
||||
* Execute all bulk operations using the default write concern.
|
||||
*
|
||||
|
||||
@@ -20,7 +20,6 @@ import lombok.EqualsAndHashCode;
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
@@ -85,19 +84,8 @@ public class ChangeStreamEvent<T> {
|
||||
@Nullable
|
||||
public Instant getTimestamp() {
|
||||
|
||||
return getBsonTimestamp() != null ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class)
|
||||
: null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link ChangeStreamDocument#getClusterTime() cluster time}.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public BsonTimestamp getBsonTimestamp() {
|
||||
return raw != null ? raw.getClusterTime() : null;
|
||||
return raw != null && raw.getClusterTime() != null
|
||||
? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -21,17 +21,12 @@ import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.FullDocument;
|
||||
@@ -52,8 +47,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
protected ChangeStreamOptions() {}
|
||||
|
||||
@@ -89,31 +83,7 @@ public class ChangeStreamOptions {
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<Instant> getResumeTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<BsonTimestamp> getResumeBsonTimestamp() {
|
||||
return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isStartAfter() {
|
||||
return Resume.START_AFTER.equals(resume);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isResumeAfter() {
|
||||
return Resume.RESUME_AFTER.equals(resume);
|
||||
return Optional.ofNullable(resumeTimestamp);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -133,48 +103,6 @@ public class ChangeStreamOptions {
|
||||
return new ChangeStreamOptionsBuilder();
|
||||
}
|
||||
|
||||
private static <T> T asTimestampOfType(Object timestamp, Class<T> targetType) {
|
||||
return targetType.cast(doGetTimestamp(timestamp, targetType));
|
||||
}
|
||||
|
||||
private static <T> Object doGetTimestamp(Object timestamp, Class<T> targetType) {
|
||||
|
||||
if (ClassUtils.isAssignableValue(targetType, timestamp)) {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
if (timestamp instanceof Instant) {
|
||||
return new BsonTimestamp((int) ((Instant) timestamp).getEpochSecond(), 0);
|
||||
}
|
||||
|
||||
if (timestamp instanceof BsonTimestamp) {
|
||||
return Instant.ofEpochSecond(((BsonTimestamp) timestamp).getTime());
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
"o_O that should actually not happen. The timestamp should be an Instant or a BsonTimestamp but was "
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Resume {
|
||||
|
||||
UNDEFINED,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument)
|
||||
*/
|
||||
START_AFTER,
|
||||
|
||||
/**
|
||||
* @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument)
|
||||
*/
|
||||
RESUME_AFTER
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for creating {@link ChangeStreamOptions}.
|
||||
*
|
||||
@@ -187,8 +115,7 @@ public class ChangeStreamOptions {
|
||||
private @Nullable BsonValue resumeToken;
|
||||
private @Nullable FullDocument fullDocumentLookup;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable Object resumeTimestamp;
|
||||
private Resume resume = Resume.UNDEFINED;
|
||||
private @Nullable Instant resumeTimestamp;
|
||||
|
||||
private ChangeStreamOptionsBuilder() {}
|
||||
|
||||
@@ -256,11 +183,6 @@ public class ChangeStreamOptions {
|
||||
Assert.notNull(resumeToken, "ResumeToken must not be null!");
|
||||
|
||||
this.resumeToken = resumeToken;
|
||||
|
||||
if (this.resume == Resume.UNDEFINED) {
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -302,51 +224,6 @@ public class ChangeStreamOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the cluster time to resume from.
|
||||
*
|
||||
* @param resumeTimestamp must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) {
|
||||
|
||||
Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!");
|
||||
|
||||
this.resumeTimestamp = resumeTimestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to continue emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.RESUME_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the resume token after which to start emitting notifications.
|
||||
*
|
||||
* @param resumeToken must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) {
|
||||
|
||||
resumeToken(resumeToken);
|
||||
this.resume = Resume.START_AFTER;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the built {@link ChangeStreamOptions}
|
||||
*/
|
||||
@@ -354,12 +231,11 @@ public class ChangeStreamOptions {
|
||||
|
||||
ChangeStreamOptions options = new ChangeStreamOptions();
|
||||
|
||||
options.filter = this.filter;
|
||||
options.resumeToken = this.resumeToken;
|
||||
options.fullDocumentLookup = this.fullDocumentLookup;
|
||||
options.collation = this.collation;
|
||||
options.resumeTimestamp = this.resumeTimestamp;
|
||||
options.resume = this.resume;
|
||||
options.filter = filter;
|
||||
options.resumeToken = resumeToken;
|
||||
options.fullDocumentLookup = fullDocumentLookup;
|
||||
options.collation = collation;
|
||||
options.resumeTimestamp = resumeTimestamp;
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
interface CursorPreparer {
|
||||
public interface CursorPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -23,18 +26,11 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
@@ -42,13 +38,18 @@ import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
import com.mongodb.client.model.*;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
import com.mongodb.client.model.BulkWriteOptions;
|
||||
import com.mongodb.client.model.DeleteManyModel;
|
||||
import com.mongodb.client.model.DeleteOneModel;
|
||||
import com.mongodb.client.model.DeleteOptions;
|
||||
import com.mongodb.client.model.InsertOneModel;
|
||||
import com.mongodb.client.model.UpdateManyModel;
|
||||
import com.mongodb.client.model.UpdateOneModel;
|
||||
import com.mongodb.client.model.UpdateOptions;
|
||||
import com.mongodb.client.model.WriteModel;
|
||||
|
||||
/**
|
||||
* Default implementation for {@link BulkOperations}.
|
||||
@@ -57,8 +58,6 @@ import lombok.Value;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Minsu Kim
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @since 1.9
|
||||
*/
|
||||
@@ -67,7 +66,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final MongoOperations mongoOperations;
|
||||
private final String collectionName;
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
private final List<WriteModel<Document>> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
@@ -124,9 +123,16 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
Assert.notNull(document, "Document must not be null!");
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(document);
|
||||
addModel(source, new InsertOneModel<>(getMappedObject(source)));
|
||||
if (document instanceof Document) {
|
||||
|
||||
models.add(new InsertOneModel<>((Document) document));
|
||||
return this;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
mongoOperations.getConverter().write(document, sink);
|
||||
|
||||
models.add(new InsertOneModel<>(sink));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -240,7 +246,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
DeleteOptions deleteOptions = new DeleteOptions();
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation);
|
||||
|
||||
addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
@@ -261,29 +267,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#replaceOne(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions)
|
||||
*/
|
||||
@Override
|
||||
public BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) {
|
||||
|
||||
Assert.notNull(query, "Query must not be null!");
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
ReplaceOptions replaceOptions = new ReplaceOptions();
|
||||
replaceOptions.upsert(options.isUpsert());
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation);
|
||||
|
||||
maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName));
|
||||
Object source = maybeInvokeBeforeConvertCallback(replacement);
|
||||
addModel(source,
|
||||
new ReplaceOneModel<>(getMappedQuery(query.getQueryObject()), getMappedObject(source), replaceOptions));
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.BulkOperations#executeBulk()
|
||||
@@ -293,48 +276,18 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
|
||||
try {
|
||||
|
||||
com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo);
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
Assert.state(result != null, "Result must not be null.");
|
||||
|
||||
models.forEach(this::maybeEmitAfterSaveEvent);
|
||||
|
||||
return result;
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions);
|
||||
});
|
||||
} finally {
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
}
|
||||
|
||||
private BulkWriteResult bulkWriteTo(MongoCollection<Document> collection) {
|
||||
|
||||
if (defaultWriteConcern != null) {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
|
||||
maybeEmitBeforeSaveEvent(it);
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeInvokeBeforeSaveCallback(it.getSource(), target);
|
||||
}
|
||||
|
||||
return mapWriteModel(it.getModel());
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs update and upsert bulk operations.
|
||||
*
|
||||
@@ -354,9 +307,9 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation);
|
||||
|
||||
if (multi) {
|
||||
addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
} else {
|
||||
addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options));
|
||||
}
|
||||
|
||||
return this;
|
||||
@@ -405,76 +358,6 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity());
|
||||
}
|
||||
|
||||
private Document getMappedObject(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
Document sink = new Document();
|
||||
|
||||
mongoOperations.getConverter().write(source, sink);
|
||||
return sink;
|
||||
}
|
||||
|
||||
private void addModel(Object source, WriteModel<Document> model) {
|
||||
models.add(new SourceAwareWriteModelHolder(source, model));
|
||||
}
|
||||
|
||||
private void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new BeforeSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder it) {
|
||||
|
||||
if (it.getModel() instanceof InsertOneModel) {
|
||||
|
||||
Document target = ((InsertOneModel<Document>) it.getModel()).getDocument();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
} else if (it.getModel() instanceof ReplaceOneModel) {
|
||||
|
||||
Document target = ((ReplaceOneModel<Document>) it.getModel()).getReplacement();
|
||||
maybeEmitEvent(new AfterSaveEvent<>(it.getSource(), target, collectionName));
|
||||
}
|
||||
}
|
||||
|
||||
private <E extends MongoMappingEvent<T>, T> E maybeEmitEvent(E event) {
|
||||
|
||||
if (null != bulkOperationContext.getEventPublisher()) {
|
||||
bulkOperationContext.getEventPublisher().publishEvent(event);
|
||||
}
|
||||
|
||||
return event;
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeConvertCallback(Object value) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeConvertCallback.class, value, collectionName);
|
||||
}
|
||||
|
||||
private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) {
|
||||
|
||||
if (bulkOperationContext.getEntityCallbacks() == null) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return bulkOperationContext.getEntityCallbacks().callback(BeforeSaveCallback.class, value, mappedDocument,
|
||||
collectionName);
|
||||
}
|
||||
|
||||
private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) {
|
||||
|
||||
BulkWriteOptions options = new BulkWriteOptions();
|
||||
@@ -504,20 +387,5 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object chaining together an actual source with its {@link WriteModel} representation.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,15 +120,19 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return execute(collection -> {
|
||||
|
||||
MongoPersistentEntity<?> entity = lookupPersistentEntity(type, collectionName);
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
Document mappedKeys = mapper.getMappedObject(indexDefinition.getIndexKeys(), entity);
|
||||
return collection.createIndex(mappedKeys, indexOptions);
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY),
|
||||
lookupPersistentEntity(type, collectionName)));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -188,7 +192,7 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
private List<IndexInfo> getIndexData(MongoCursor<Document> cursor) {
|
||||
|
||||
List<IndexInfo> indexInfoList = new ArrayList<>();
|
||||
List<IndexInfo> indexInfoList = new ArrayList<IndexInfo>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
|
||||
@@ -213,25 +217,4 @@ public class DefaultIndexOperations implements IndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, callback);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
mapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,16 +94,23 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
|
||||
return mongoOperations.execute(collectionName, collection -> {
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
Document indexOptions = indexDefinition.getIndexOptions();
|
||||
|
||||
IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition);
|
||||
|
||||
indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity);
|
||||
indexOptions = addDefaultCollationIfRequired(indexOptions, entity);
|
||||
if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions);
|
||||
Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
|
||||
MongoPersistentEntity<?> entity = type
|
||||
.map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val))
|
||||
.orElseGet(() -> lookupPersistentEntity(collectionName));
|
||||
|
||||
ops = ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity));
|
||||
}
|
||||
|
||||
return collection.createIndex(indexDefinition.getIndexKeys(), ops);
|
||||
|
||||
}).next();
|
||||
}
|
||||
@@ -119,24 +126,21 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String)
|
||||
*/
|
||||
public Mono<Void> dropIndex(final String name) {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes()
|
||||
*/
|
||||
public Mono<Void> dropAllIndexes() {
|
||||
return dropIndex("*");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo()
|
||||
*/
|
||||
public Flux<IndexInfo> getIndexInfo() {
|
||||
@@ -144,25 +148,4 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations {
|
||||
return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) //
|
||||
.map(IndexConverters.documentToIndexInfoConverter()::convert);
|
||||
}
|
||||
|
||||
private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions,
|
||||
@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY));
|
||||
return ops.partialFilterExpression(
|
||||
queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity));
|
||||
}
|
||||
|
||||
private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (ops.getCollation() != null || entity == null || !entity.hasCollation()) {
|
||||
return ops;
|
||||
}
|
||||
|
||||
return ops.collation(entity.getCollation().toMongoCollation());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,15 +42,13 @@ import com.mongodb.MongoException;
|
||||
import com.mongodb.client.MongoDatabase;
|
||||
|
||||
/**
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}.
|
||||
* Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
class DefaultScriptOperations implements ScriptOperations {
|
||||
|
||||
private static final String SCRIPT_COLLECTION_NAME = "system.js";
|
||||
|
||||
@@ -21,14 +21,12 @@ import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
@@ -36,15 +34,15 @@ import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
|
||||
import com.mongodb.util.JSONParseException;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
*
|
||||
@@ -153,59 +151,15 @@ class EntityOperations {
|
||||
return ID_FIELD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties.
|
||||
*
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @return the name of the distanceField to use. {@literal dis} by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
public String nearQueryDistanceFieldName(Class<?> domainType) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = context.getPersistentEntity(domainType);
|
||||
if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) {
|
||||
return "dis";
|
||||
}
|
||||
|
||||
String distanceFieldName = "calculated-distance";
|
||||
int counter = 0;
|
||||
while (persistentEntity.getPersistentProperty(distanceFieldName) != null) {
|
||||
distanceFieldName += "-" + (counter++);
|
||||
}
|
||||
|
||||
return distanceFieldName;
|
||||
}
|
||||
|
||||
private static Document parse(String source) {
|
||||
|
||||
try {
|
||||
return Document.parse(source);
|
||||
} catch (org.bson.json.JsonParseException o_O) {
|
||||
} catch (JSONParseException | org.bson.json.JsonParseException o_O) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
} catch (RuntimeException o_O) {
|
||||
|
||||
// legacy 3.x exception
|
||||
if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) {
|
||||
throw new MappingException("Could not parse given String to save into a JSON document!", o_O);
|
||||
}
|
||||
throw o_O;
|
||||
}
|
||||
}
|
||||
|
||||
public <T> TypedOperations<T> forType(@Nullable Class<T> entityClass) {
|
||||
|
||||
if (entityClass != null) {
|
||||
|
||||
MongoPersistentEntity<?> entity = context.getPersistentEntity(entityClass);
|
||||
|
||||
if (entity != null) {
|
||||
return new TypedEntityOperations(entity);
|
||||
}
|
||||
|
||||
}
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -235,16 +189,6 @@ class EntityOperations {
|
||||
*/
|
||||
Query getByIdQuery();
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}.
|
||||
*
|
||||
* @return the {@link Query} to use for removing the entity. Never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default Query getRemoveByQuery() {
|
||||
return isVersionedEntity() ? getQueryForVersion() : getByIdQuery();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@link Query} to find the entity in its current version.
|
||||
*
|
||||
@@ -275,11 +219,9 @@ class EntityOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}.
|
||||
* Returns the value of the version if the entity has a version property, {@literal null} otherwise.
|
||||
*
|
||||
* @return the entity version. Can be {@literal null}.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check
|
||||
* {@link #isVersionedEntity()}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
Object getVersion();
|
||||
@@ -293,7 +235,7 @@ class EntityOperations {
|
||||
|
||||
/**
|
||||
* Returns whether the entity is considered to be new.
|
||||
*
|
||||
*
|
||||
* @return
|
||||
* @since 2.1.2
|
||||
*/
|
||||
@@ -335,8 +277,8 @@ class EntityOperations {
|
||||
/**
|
||||
* Returns the current version value if the entity has a version property.
|
||||
*
|
||||
* @return the current version or {@literal null} in case it's uninitialized.
|
||||
* @throws IllegalStateException if the entity does not define a {@literal version} property.
|
||||
* @return the current version or {@literal null} in case it's uninitialized or the entity doesn't expose a version
|
||||
* property.
|
||||
*/
|
||||
@Nullable
|
||||
Number getVersion();
|
||||
@@ -444,7 +386,7 @@ class EntityOperations {
|
||||
return map;
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -538,10 +480,10 @@ class EntityOperations {
|
||||
public Query getQueryForVersion() {
|
||||
|
||||
MongoPersistentProperty idProperty = entity.getRequiredIdProperty();
|
||||
MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty();
|
||||
MongoPersistentProperty property = entity.getRequiredVersionProperty();
|
||||
|
||||
return new Query(Criteria.where(idProperty.getName()).is(getId())//
|
||||
.and(versionProperty.getName()).is(getVersion()));
|
||||
.and(property.getName()).is(getVersion()));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -615,7 +557,7 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.Entity#isNew()
|
||||
*/
|
||||
@@ -725,102 +667,4 @@ class EntityOperations {
|
||||
return propertyAccessor.getBean();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Type-specific operations abstraction.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @param <T>
|
||||
* @since 2.2
|
||||
*/
|
||||
interface TypedOperations<T> {
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} for the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation();
|
||||
|
||||
/**
|
||||
* Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for
|
||||
* the underlying entity.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query == null) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} backed by {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation()
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation() {
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.TypedOperations#getCollation(org.springframework.data.mongodb.core.query.Query)
|
||||
*/
|
||||
@Override
|
||||
public Optional<Collation> getCollation(Query query) {
|
||||
|
||||
if (query.getCollation().isPresent()) {
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -33,31 +33,6 @@ public class FindAndModifyOptions {
|
||||
|
||||
private @Nullable Collation collation;
|
||||
|
||||
private static final FindAndModifyOptions NONE = new FindAndModifyOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed. Please use FindAndModifyOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions returnNew(boolean returnNew) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions upsert(boolean upsert) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions remove(boolean remove) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndModifyOptions collation(@Nullable Collation collation) {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a FindAndModifyOptions instance
|
||||
*
|
||||
@@ -67,19 +42,9 @@ public class FindAndModifyOptions {
|
||||
return new FindAndModifyOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndModifyOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndModifyOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link FindAndModifyOptions} based on option of given {@litearl source}.
|
||||
*
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return new instance of {@link FindAndModifyOptions}.
|
||||
* @since 2.0
|
||||
|
||||
@@ -36,21 +36,6 @@ public class FindAndReplaceOptions {
|
||||
private boolean returnNew;
|
||||
private boolean upsert;
|
||||
|
||||
private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() {
|
||||
|
||||
private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed. Please use FindAndReplaceOptions.options() instead.";
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions returnNew() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FindAndReplaceOptions upsert() {
|
||||
throw new UnsupportedOperationException(ERROR_MSG);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance.
|
||||
* <dl>
|
||||
@@ -66,16 +51,6 @@ public class FindAndReplaceOptions {
|
||||
return new FindAndReplaceOptions();
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
*
|
||||
* @return unmodifiable {@link FindAndReplaceOptions} instance.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static FindAndReplaceOptions none() {
|
||||
return NONE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link FindAndReplaceOptions} instance with
|
||||
* <dl>
|
||||
|
||||
@@ -21,8 +21,9 @@ import com.mongodb.reactivestreams.client.FindPublisher;
|
||||
* Simple callback interface to allow customization of a {@link FindPublisher}.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @author Konstantin Volivach
|
||||
*/
|
||||
interface FindPublisherPreparer {
|
||||
public interface FindPublisherPreparer {
|
||||
|
||||
/**
|
||||
* Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor.
|
||||
|
||||
@@ -92,7 +92,7 @@ public class MappedDocument {
|
||||
* mapped to the specific domain type.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @since 2.1.4
|
||||
*/
|
||||
class MappedUpdate implements UpdateDefinition {
|
||||
|
||||
@@ -137,14 +137,5 @@ public class MappedDocument {
|
||||
public Boolean isIsolated() {
|
||||
return delegate.isIsolated();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#getArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,210 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
* domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names}
|
||||
* and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* org.springframework.data.mongodb.core.MongoJsonSchemaCreator#createSchemaFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public MongoJsonSchema createSchemaFor(Class<?> type) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty nested : entity) {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
break;
|
||||
}
|
||||
|
||||
currentPath.add(nested);
|
||||
schemaProperties.add(computeSchemaForProperty(currentPath));
|
||||
}
|
||||
|
||||
return schemaProperties;
|
||||
}
|
||||
|
||||
private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {
|
||||
|
||||
MongoPersistentProperty property = CollectionUtils.lastElement(path);
|
||||
|
||||
boolean required = isRequiredProperty(property);
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
MongoPersistentProperty property, boolean required) {
|
||||
|
||||
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
|
||||
mappingContext.getRequiredPersistentEntity(property));
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) {
|
||||
return createSchemaProperty(fieldName, type, required, Collections.emptyList());
|
||||
}
|
||||
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
|
||||
return property instanceof MongoPersistentProperty ? ((MongoPersistentProperty) property).getFieldName()
|
||||
: property.getName();
|
||||
}
|
||||
|
||||
private boolean isRequiredProperty(PersistentProperty property) {
|
||||
return property.getType().isPrimitive();
|
||||
}
|
||||
|
||||
private Class<?> computeTargetType(PersistentProperty<?> property) {
|
||||
|
||||
if (!(property instanceof MongoPersistentProperty)) {
|
||||
return property.getType();
|
||||
}
|
||||
|
||||
MongoPersistentProperty mongoProperty = (MongoPersistentProperty) property;
|
||||
if (!mongoProperty.isIdProperty()) {
|
||||
return mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
if (mongoProperty.hasExplicitWriteTarget()) {
|
||||
return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass();
|
||||
}
|
||||
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
@@ -22,7 +22,6 @@ import org.springframework.beans.factory.config.AbstractFactoryBean;
|
||||
import org.springframework.data.mongodb.MongoDbFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.DBDecoderFactory;
|
||||
import com.mongodb.DBEncoderFactory;
|
||||
import com.mongodb.MongoClient;
|
||||
@@ -42,8 +41,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
|
||||
private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build();
|
||||
|
||||
// TODO: Mongo Driver 4 - use application name insetad of description if not available
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getApplicationName();
|
||||
private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription();
|
||||
private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost();
|
||||
private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost();
|
||||
private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS
|
||||
@@ -53,8 +51,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime();
|
||||
private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout();
|
||||
private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout();
|
||||
|
||||
// TODO: Mongo Driver 4 - check if available
|
||||
private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive();
|
||||
private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference();
|
||||
private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory();
|
||||
@@ -62,8 +58,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern();
|
||||
private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory();
|
||||
private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled();
|
||||
|
||||
// TODO: Mongo Driver 4 - remove this option
|
||||
private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans();
|
||||
private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency();
|
||||
private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency();
|
||||
@@ -74,14 +68,12 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
|
||||
private boolean ssl;
|
||||
private @Nullable SSLSocketFactory sslSocketFactory;
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
|
||||
/**
|
||||
* Set the {@link MongoClient} description.
|
||||
*
|
||||
* @param description
|
||||
*/
|
||||
// TODO: Mongo Driver 4 - deprecate that one and add application name
|
||||
public void setDescription(@Nullable String description) {
|
||||
this.description = description;
|
||||
}
|
||||
@@ -243,7 +235,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
}
|
||||
|
||||
/**
|
||||
* This controls if the driver should us an SSL connection. Defaults to {@literal false}.
|
||||
* This controls if the driver should us an SSL connection. Defaults to |@literal false}.
|
||||
*
|
||||
* @param ssl
|
||||
*/
|
||||
@@ -274,16 +266,6 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
this.serverSelectionTimeout = serverSelectionTimeout;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link AutoEncryptionSettings} to be used.
|
||||
*
|
||||
* @param autoEncryptionSettings can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) {
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance()
|
||||
@@ -303,7 +285,7 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
.cursorFinalizerEnabled(cursorFinalizerEnabled) //
|
||||
.dbDecoderFactory(dbDecoderFactory) //
|
||||
.dbEncoderFactory(dbEncoderFactory) //
|
||||
.applicationName(description) // TODO: Mongo Driver 4 - use application name if description not available
|
||||
.description(description) //
|
||||
.heartbeatConnectTimeout(heartbeatConnectTimeout) //
|
||||
.heartbeatFrequency(heartbeatFrequency) //
|
||||
.heartbeatSocketTimeout(heartbeatSocketTimeout) //
|
||||
@@ -315,10 +297,8 @@ public class MongoClientOptionsFactoryBean extends AbstractFactoryBean<MongoClie
|
||||
.readPreference(readPreference) //
|
||||
.requiredReplicaSetName(requiredReplicaSetName) //
|
||||
.serverSelectionTimeout(serverSelectionTimeout) //
|
||||
.sslEnabled(ssl) //
|
||||
.autoEncryptionSettings(autoEncryptionSettings) //
|
||||
.socketFactory(socketFactoryToUse) // TODO: Mongo Driver 4 -
|
||||
.socketKeepAlive(socketKeepAlive) // TODO: Mongo Driver 4 - remove if not available
|
||||
.socketFactory(socketFactoryToUse) //
|
||||
.socketKeepAlive(socketKeepAlive) //
|
||||
.socketTimeout(socketTimeout) //
|
||||
.threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) //
|
||||
.writeConcern(writeConcern).build();
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import org.bson.BsonDocument;
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.AutoEncryptionSettings;
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class MongoEncryptionSettingsFactoryBean implements FactoryBean<AutoEncryptionSettings> {
|
||||
|
||||
private boolean bypassAutoEncryption;
|
||||
private String keyVaultNamespace;
|
||||
private Map<String, Object> extraOptions;
|
||||
private MongoClientSettings keyVaultClientSettings;
|
||||
private Map<String, Map<String, Object>> kmsProviders;
|
||||
private Map<String, BsonDocument> schemaMap;
|
||||
|
||||
/**
|
||||
* @param bypassAutoEncryption
|
||||
* @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean)
|
||||
*/
|
||||
public void setBypassAutoEncryption(boolean bypassAutoEncryption) {
|
||||
this.bypassAutoEncryption = bypassAutoEncryption;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param extraOptions
|
||||
* @see AutoEncryptionSettings.Builder#extraOptions(Map)
|
||||
*/
|
||||
public void setExtraOptions(Map<String, Object> extraOptions) {
|
||||
this.extraOptions = extraOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultNamespace
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultNamespace(String)
|
||||
*/
|
||||
public void setKeyVaultNamespace(String keyVaultNamespace) {
|
||||
this.keyVaultNamespace = keyVaultNamespace;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param keyVaultClientSettings
|
||||
* @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings)
|
||||
*/
|
||||
public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) {
|
||||
this.keyVaultClientSettings = keyVaultClientSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param kmsProviders
|
||||
* @see AutoEncryptionSettings.Builder#kmsProviders(Map)
|
||||
*/
|
||||
public void setKmsProviders(Map<String, Map<String, Object>> kmsProviders) {
|
||||
this.kmsProviders = kmsProviders;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param schemaMap
|
||||
* @see AutoEncryptionSettings.Builder#schemaMap(Map)
|
||||
*/
|
||||
public void setSchemaMap(Map<String, BsonDocument> schemaMap) {
|
||||
this.schemaMap = schemaMap;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public AutoEncryptionSettings getObject() {
|
||||
|
||||
return AutoEncryptionSettings.builder() //
|
||||
.bypassAutoEncryption(bypassAutoEncryption) //
|
||||
.keyVaultNamespace(keyVaultNamespace) //
|
||||
.keyVaultMongoClientSettings(keyVaultClientSettings) //
|
||||
.kmsProviders(orEmpty(kmsProviders)) //
|
||||
.extraOptions(orEmpty(extraOptions)) //
|
||||
.schemaMap(orEmpty(schemaMap)) //
|
||||
.build();
|
||||
}
|
||||
|
||||
private <K, V> Map<K, V> orEmpty(@Nullable Map<K, V> source) {
|
||||
return source != null ? source : Collections.emptyMap();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return AutoEncryptionSettings.class;
|
||||
}
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
* <strong>Ignored Properties</strong>
|
||||
* <ul>
|
||||
* <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
|
||||
* </ul>
|
||||
* <strong>Property Type Mapping</strong>
|
||||
* <ul>
|
||||
* <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
|
||||
* <li>{@link java.util.Map} -> {@code type : 'object'}</li>
|
||||
* <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
|
||||
* <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
|
||||
* <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
|
||||
* </ul>
|
||||
* <br />
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface MongoJsonSchemaCreator {
|
||||
|
||||
/**
|
||||
* Create the {@link MongoJsonSchema} for the given {@link Class type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
|
||||
* {@link MongoConverter}.
|
||||
*
|
||||
* @param mongoConverter must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
*/
|
||||
static MongoJsonSchemaCreator create(MongoConverter mongoConverter) {
|
||||
|
||||
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
|
||||
return new MappingMongoJsonSchemaCreator(mongoConverter);
|
||||
}
|
||||
}
|
||||
@@ -362,9 +362,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*
|
||||
* @return
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
ScriptOperations scriptOps();
|
||||
|
||||
/**
|
||||
@@ -436,11 +434,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(String inputCollectionName, GroupBy groupBy, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
@@ -455,12 +449,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* reduce function.
|
||||
* @param entityClass The parametrized type of the returned list
|
||||
* @return The results of the group operation
|
||||
* @deprecated since 2.2. The {@code group} command has been removed in MongoDB Server 4.2.0. <br />
|
||||
* Please use {@link #aggregate(TypedAggregation, String, Class) } with a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.GroupOperation} and
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GroupByResults<T> group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy,
|
||||
Class<T> entityClass);
|
||||
|
||||
@@ -648,52 +637,24 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* information to determine the collection the query is ran against. Note, that MongoDB limits the number of results
|
||||
* by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of
|
||||
* results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* AggregationResults<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected. Must not be {@literal null} nor empty.
|
||||
* @return
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> GeoResults<T> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -1140,11 +1101,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1156,11 +1112,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1171,11 +1123,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1190,11 +1138,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1252,11 +1200,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1270,7 +1218,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
@@ -1284,7 +1232,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1300,6 +1250,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1312,8 +1265,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
*
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, FindAndModifyOptions, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
* @param update the update document that contains the updated object or $ operators to manipulate the existing
|
||||
@@ -1326,7 +1281,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1342,6 +1299,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1355,6 +1315,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1409,10 +1371,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
UpdateResult updateMulti(Query query, Update update, Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Remove the given object from the collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Remove the given object from the collection by id.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @return the {@link DeleteResult} which lets you access the results of the previous delete.
|
||||
@@ -1420,10 +1379,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
DeleteResult remove(Object object);
|
||||
|
||||
/**
|
||||
* Removes the given object from the given collection by {@literal id} and (if applicable) its
|
||||
* {@link org.springframework.data.annotation.Version}. <br />
|
||||
* Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged()
|
||||
* acknowledged} remove operation was successful or not.
|
||||
* Removes the given object from the given collection.
|
||||
*
|
||||
* @param object must not be {@literal null}.
|
||||
* @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -66,8 +66,12 @@ class PropertyOperations {
|
||||
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
|
||||
}
|
||||
} else {
|
||||
mappingContext.getRequiredPersistentEntity(targetType).doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(targetType);
|
||||
if (entity != null) {
|
||||
entity.doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
}
|
||||
}
|
||||
|
||||
return projectedFields;
|
||||
|
||||
@@ -1,200 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> operations in a fluent API style. <br />
|
||||
* The starting {@literal domainType} is used for mapping a potentially given
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the
|
||||
* originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}.
|
||||
* However, it is possible to define an different {@literal returnType} via {@code as}.<br />
|
||||
* The collection to operate on is optional in which case call collection with the actual database are watched, use
|
||||
* {@literal watchCollection} to define a fixed collection.
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* changeStream(Jedi.class)
|
||||
* .watchCollection("star-wars")
|
||||
* .filter(where("operationType").is("insert"))
|
||||
* .resumeAt(Instant.now())
|
||||
* .listen();
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public interface ReactiveChangeStreamOperation {
|
||||
|
||||
/**
|
||||
* Start creating a change stream operation for the given {@literal domainType} watching all collections within the
|
||||
* database. <br />
|
||||
* Consider limiting events be defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or
|
||||
* {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}.
|
||||
*
|
||||
* @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements.
|
||||
* @return new instance of {@link ReactiveChangeStream}. Never {@literal null}.
|
||||
* @throws IllegalArgumentException if domainType is {@literal null}.
|
||||
*/
|
||||
<T> ReactiveChangeStream<T> changeStream(Class<T> domainType);
|
||||
|
||||
/**
|
||||
* Compose change stream execution by calling one of the terminating methods.
|
||||
*/
|
||||
interface TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
}
|
||||
|
||||
/**
|
||||
* Collection override (optional).
|
||||
*/
|
||||
interface ChangeStreamWithCollection<T> {
|
||||
|
||||
/**
|
||||
* Explicitly set the name of the collection to watch.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param collection must not be {@literal null} nor {@literal empty}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code collection} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(String collection);
|
||||
|
||||
/**
|
||||
* Set the the collection to watch. Collection name is derived from the {@link Class entityClass}.<br />
|
||||
* Skip this step to watch all collections within the database.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if {@code entityClass} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide a filter for limiting results (optional).
|
||||
*/
|
||||
interface ChangeStreamWithFilterAndProjection<T> extends ResumingChangeStream<T>, TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Use an {@link Aggregation} to filter matching events.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(Aggregation by);
|
||||
|
||||
/**
|
||||
* Use a {@link CriteriaDefinition critera} to filter matching events via an
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}.
|
||||
*
|
||||
* @param by must not be {@literal null}.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}.
|
||||
*/
|
||||
ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by);
|
||||
|
||||
/**
|
||||
* Define the target type fields should be mapped to.
|
||||
*
|
||||
* @param resultType must not be {@literal null}.
|
||||
* @param <R> result type.
|
||||
* @return new instance of {@link ChangeStreamWithFilterAndProjection}.
|
||||
* @throws IllegalArgumentException if resultType is {@literal null}.
|
||||
*/
|
||||
<R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resume a change stream. (optional).
|
||||
*/
|
||||
interface ResumingChangeStream<T> extends TerminatingChangeStream<T> {
|
||||
|
||||
/**
|
||||
* Resume the change stream at a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(Instant)
|
||||
* @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp)
|
||||
* @throws IllegalArgumentException if the given beacon is neither {@link Instant} nor {@link BsonTimestamp}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAt(Object token);
|
||||
|
||||
/**
|
||||
* Resume the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue)
|
||||
* @see ChangeStreamOptionsBuilder#resumeToken(BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> resumeAfter(Object token);
|
||||
|
||||
/**
|
||||
* Start the change stream after a given point.
|
||||
*
|
||||
* @param token an {@link Instant} or {@link BsonTimestamp}
|
||||
* @return new instance of {@link TerminatingChangeStream}.
|
||||
* @see ChangeStreamOptionsBuilder#startAfter(BsonValue) (BsonValue)
|
||||
* @throws IllegalArgumentException if the given beacon not a {@link BsonValue}.
|
||||
*/
|
||||
TerminatingChangeStream<T> startAfter(Object token);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide some options.
|
||||
*/
|
||||
interface ChangeStreamWithOptions<T> {
|
||||
|
||||
/**
|
||||
* Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously
|
||||
* defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the
|
||||
* builder and can be overwritten via eg. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}.
|
||||
*
|
||||
* @param optionsConsumer never {@literal null}.
|
||||
* @return new instance of {@link ReactiveChangeStream}.
|
||||
*/
|
||||
ReactiveChangeStream<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way.
|
||||
*/
|
||||
interface ReactiveChangeStream<T> extends ChangeStreamWithOptions<T>, ChangeStreamWithCollection<T>,
|
||||
TerminatingChangeStream<T>, ResumingChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {}
|
||||
}
|
||||
@@ -1,230 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
/**
|
||||
* @param template must not be {@literal null}.
|
||||
*/
|
||||
ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation#changeStream(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <T> ReactiveChangeStream<T> changeStream(Class<T> domainType) {
|
||||
|
||||
Assert.notNull(domainType, "DomainType must not be null!");
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null);
|
||||
}
|
||||
|
||||
static class ReactiveChangeStreamSupport<T>
|
||||
implements ReactiveChangeStream<T>, ChangeStreamWithFilterAndProjection<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final @Nullable String collection;
|
||||
private final @Nullable ChangeStreamOptions options;
|
||||
|
||||
private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
@Nullable String collection, @Nullable ChangeStreamOptions options) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection name must not be null nor empty!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithCollection#watchCollection(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> watchCollection(Class<?> entityClass) {
|
||||
|
||||
Assert.notNull(entityClass, "Collection type not be null!");
|
||||
|
||||
return watchCollection(template.getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAt(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAt(Object token) {
|
||||
|
||||
return withOptions(builder -> {
|
||||
|
||||
if (token instanceof Instant) {
|
||||
builder.resumeAt((Instant) token);
|
||||
} else if (token instanceof BsonTimestamp) {
|
||||
builder.resumeAt((BsonTimestamp) token);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#resumeAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> resumeAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.resumeAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ResumingChangeStream#startAfter(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public TerminatingChangeStream<T> startAfter(Object token) {
|
||||
|
||||
Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue");
|
||||
|
||||
return withOptions(builder -> builder.startAfter((BsonValue) token));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithOptions#withOptions(java.util.function.Consumer)
|
||||
*/
|
||||
@Override
|
||||
public ReactiveChangeStreamSupport<T> withOptions(Consumer<ChangeStreamOptionsBuilder> optionsConsumer) {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = initOptionsBuilder();
|
||||
optionsConsumer.accept(builder);
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithProjection#as(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public <R> ChangeStreamWithFilterAndProjection<R> as(Class<R> resultType) {
|
||||
|
||||
Assert.notNull(resultType, "ResultType must not be null!");
|
||||
|
||||
return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.aggregation.Aggregation)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(Aggregation filter) {
|
||||
return withOptions(builder -> builder.filter(filter));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.ChangeStreamWithFilter#filter(org.springframework.data.mongodb.core.query.CriteriaDefinition)
|
||||
*/
|
||||
@Override
|
||||
public ChangeStreamWithFilterAndProjection<T> filter(CriteriaDefinition by) {
|
||||
|
||||
MatchOperation $match = Aggregation.match(by);
|
||||
Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match)
|
||||
: Aggregation.newAggregation($match);
|
||||
return filter(aggregation);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ReactiveChangeStreamOperation.TerminatingChangeStream#listen()
|
||||
*/
|
||||
@Override
|
||||
public Flux<ChangeStreamEvent<T>> listen() {
|
||||
return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType);
|
||||
}
|
||||
|
||||
private ChangeStreamOptionsBuilder initOptionsBuilder() {
|
||||
|
||||
ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder();
|
||||
if (options == null) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
options.getFilter().ifPresent(it -> {
|
||||
if (it instanceof Aggregation) {
|
||||
builder.filter((Aggregation) it);
|
||||
} else {
|
||||
builder.filter(((List<Document>) it).toArray(new Document[0]));
|
||||
}
|
||||
});
|
||||
options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup);
|
||||
options.getCollation().ifPresent(builder::collation);
|
||||
|
||||
if (options.isResumeAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::resumeAfter);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter);
|
||||
} else if (options.isStartAfter()) {
|
||||
options.getResumeToken().ifPresent(builder::startAfter);
|
||||
} else {
|
||||
options.getResumeTimestamp().ifPresent(builder::resumeAt);
|
||||
options.getResumeBsonTimestamp().ifPresent(builder::resumeAt);
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -23,4 +23,4 @@ package org.springframework.data.mongodb.core;
|
||||
* @since 2.0
|
||||
*/
|
||||
public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation,
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {}
|
||||
ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {}
|
||||
|
||||
@@ -40,7 +40,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.transaction.reactive.TransactionalOperator;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
@@ -221,9 +220,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
*
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction();
|
||||
|
||||
/**
|
||||
@@ -238,9 +235,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param sessionProvider must not be {@literal null}.
|
||||
* @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 2.2. Use {@code @Transactional} or {@link TransactionalOperator}.
|
||||
*/
|
||||
@Deprecated
|
||||
ReactiveSessionScoped inTransaction(Publisher<ClientSession> sessionProvider);
|
||||
|
||||
/**
|
||||
@@ -624,52 +619,24 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* entity mapping information to determine the collection the query is ran against. Note, that MongoDB limits the
|
||||
* number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a
|
||||
* particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB
|
||||
* limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect
|
||||
* a particular number of results.
|
||||
* <p>
|
||||
* MongoDB 4.2 has removed the {@code geoNear} command. This method uses since version 2.2 aggregations and the
|
||||
* {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using
|
||||
* aggregations directly:
|
||||
* </p>
|
||||
*
|
||||
* <pre class="code">
|
||||
* TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
|
||||
* .withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
* Flux<Document> results = aggregate(geoNear, Document.class);
|
||||
* </pre>
|
||||
*
|
||||
* @param near must not be {@literal null}.
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @param collectionName the collection to trigger the query against. If no collection name is given the entity class
|
||||
* will be inspected.
|
||||
* @return the converted {@link GeoResult}s.
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. Use Aggregations with
|
||||
* {@link Aggregation#geoNear(NearQuery, String)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
@@ -920,11 +887,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -936,11 +898,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -951,11 +909,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* class to map the given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -970,11 +924,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1030,11 +984,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
@@ -1078,11 +1032,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1096,7 +1050,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
@@ -1115,11 +1069,11 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1133,7 +1087,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
@@ -1147,7 +1101,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1163,6 +1119,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1175,7 +1134,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Performs an upsert. If no document is found that matches the query, a new document is created and inserted by
|
||||
* combining the query document and the update document.
|
||||
* combining the query document and the update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be upserted. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1189,7 +1150,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Updates the first object that is found in the collection of the entity class that matches the query document with
|
||||
* the provided update document.
|
||||
* the provided update document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1205,6 +1168,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> Any additional support for field mapping, versions, etc. is not available due to the lack of
|
||||
* domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1218,6 +1184,8 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Updates the first object that is found in the specified collection that matches the query document criteria with
|
||||
* the provided updated document. <br />
|
||||
* <strong>NOTE:</strong> {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}.
|
||||
* Use {@link #findAndModify(Query, Update, Class, String)} instead.
|
||||
*
|
||||
* @param query the query document that specifies the criteria used to select a record to be updated. Must not be
|
||||
* {@literal null}.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -123,7 +123,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
|
||||
String collectionName = getCollectionName();
|
||||
|
||||
return template.findAndModify(query, update, findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType, collectionName);
|
||||
return template.findAndModify(query, update, findAndModifyOptions, targetType, collectionName);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -133,7 +133,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
|
||||
@Override
|
||||
public Mono<T> findAndReplace() {
|
||||
return template.findAndReplace(query, replacement,
|
||||
findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.none(), (Class) domainType,
|
||||
findAndReplaceOptions != null ? findAndReplaceOptions : new FindAndReplaceOptions(), (Class) domainType,
|
||||
getCollectionName(), targetType);
|
||||
}
|
||||
|
||||
|
||||
@@ -29,9 +29,7 @@ import com.mongodb.DB;
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
* @since 1.7
|
||||
* @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0.
|
||||
*/
|
||||
@Deprecated
|
||||
public interface ScriptOperations {
|
||||
|
||||
/**
|
||||
|
||||
@@ -211,15 +211,6 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
return delegate.withSession(session);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#isTransactionActive()
|
||||
*/
|
||||
@Override
|
||||
public boolean isTransactionActive() {
|
||||
return session != null && session.hasActiveTransaction();
|
||||
}
|
||||
|
||||
private MongoDatabase decorateDatabase(MongoDatabase database) {
|
||||
return createProxyInstance(session, database, MongoDatabase.class);
|
||||
}
|
||||
|
||||
@@ -213,20 +213,6 @@ public class AccumulatorOperators {
|
||||
return new Sum(append(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Sum} with all previously added arguments appending the given one. <br />
|
||||
* <strong>NOTE:</strong> Only possible in {@code $project} stage.
|
||||
*
|
||||
* @param value the value to add.
|
||||
* @return new instance of {@link Sum}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Sum and(Number value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new Sum(append(value));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
|
||||
@@ -244,19 +244,6 @@ public class Aggregation {
|
||||
return new ProjectionOperation(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including all top level fields of the given given {@link Class}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return new instance of {@link ProjectionOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ProjectionOperation project(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
return new ProjectionOperation(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory method to create a new {@link UnwindOperation} for the field with the given name.
|
||||
*
|
||||
@@ -625,7 +612,7 @@ public class Aggregation {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
|
||||
@@ -15,9 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
/**
|
||||
@@ -35,23 +32,7 @@ public interface AggregationOperation {
|
||||
* Turns the {@link AggregationOperation} into a {@link Document} by using the given
|
||||
* {@link AggregationOperationContext}.
|
||||
*
|
||||
* @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}.
|
||||
* @return the Document
|
||||
* @deprecated since 2.2 in favor of {@link #toPipelineStages(AggregationOperationContext)}.
|
||||
*/
|
||||
@Deprecated
|
||||
Document toDocument(AggregationOperationContext context);
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationOperation} into list of {@link Document stages} by using the given
|
||||
* {@link AggregationOperationContext}. This allows a single {@link AggregationOptions} to add additional stages for
|
||||
* eg. {@code $sort} or {@code $limit}.
|
||||
*
|
||||
* @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}.
|
||||
* @return the pipeline stages to run through. Never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default List<Document> toPipelineStages(AggregationOperationContext context) {
|
||||
return Collections.singletonList(toDocument(context));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,17 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.beans.PropertyDescriptor;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.beans.BeanUtils;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
/**
|
||||
* The context for an {@link AggregationOperation}.
|
||||
@@ -42,20 +33,7 @@ public interface AggregationOperationContext {
|
||||
* @param document will never be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
*/
|
||||
default Document getMappedObject(Document document) {
|
||||
return getMappedObject(document, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the mapped {@link Document}, potentially converting the source considering mapping metadata for the given
|
||||
* type.
|
||||
*
|
||||
* @param document will never be {@literal null}.
|
||||
* @param type can be {@literal null}.
|
||||
* @return must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
Document getMappedObject(Document document, @Nullable Class<?> type);
|
||||
Document getMappedObject(Document document);
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given
|
||||
@@ -74,33 +52,4 @@ public interface AggregationOperationContext {
|
||||
* @return
|
||||
*/
|
||||
FieldReference getReference(String name);
|
||||
|
||||
/**
|
||||
* Returns the {@link Fields} exposed by the type. May be a {@literal class} or an {@literal interface}. The default
|
||||
* implementation uses {@link BeanUtils#getPropertyDescriptors(Class) property descriptors} discover fields from a
|
||||
* {@link Class}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.2
|
||||
* @see BeanUtils#getPropertyDescriptor(Class, String)
|
||||
*/
|
||||
default Fields getFields(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) //
|
||||
.filter(it -> { // object and default methods
|
||||
Method method = it.getReadMethod();
|
||||
if (method == null) {
|
||||
return false;
|
||||
}
|
||||
if (ReflectionUtils.isObjectMethod(method)) {
|
||||
return false;
|
||||
}
|
||||
return !method.isDefault();
|
||||
}) //
|
||||
.map(PropertyDescriptor::getName) //
|
||||
.toArray(String[]::new));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,7 +24,6 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedFi
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.Fields.AggregationField;
|
||||
import org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Rendering support for {@link AggregationOperation} into a {@link List} of {@link org.bson.Document}.
|
||||
@@ -53,7 +52,7 @@ class AggregationOperationRenderer {
|
||||
|
||||
for (AggregationOperation operation : operations) {
|
||||
|
||||
operationDocuments.addAll(operation.toPipelineStages(contextToUse));
|
||||
operationDocuments.add(operation.toDocument(contextToUse));
|
||||
|
||||
if (operation instanceof FieldsExposingAggregationOperation) {
|
||||
|
||||
@@ -76,16 +75,15 @@ class AggregationOperationRenderer {
|
||||
* Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static class NoOpAggregationOperationContext implements AggregationOperationContext {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
public Document getMappedObject(Document document) {
|
||||
return document;
|
||||
}
|
||||
|
||||
|
||||
@@ -44,13 +44,11 @@ public class AggregationOptions {
|
||||
private static final String EXPLAIN = "explain";
|
||||
private static final String ALLOW_DISK_USE = "allowDiskUse";
|
||||
private static final String COLLATION = "collation";
|
||||
private static final String COMMENT = "comment";
|
||||
|
||||
private final boolean allowDiskUse;
|
||||
private final boolean explain;
|
||||
private final Optional<Document> cursor;
|
||||
private final Optional<Collation> collation;
|
||||
private final Optional<String> comment;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
@@ -75,28 +73,11 @@ public class AggregationOptions {
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation) {
|
||||
this(allowDiskUse, explain, cursor, collation, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
*
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the
|
||||
* aggregation.
|
||||
* @param collation collation for string comparison. Can be {@literal null}.
|
||||
* @param comment execution comment. Can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation, @Nullable String comment) {
|
||||
|
||||
this.allowDiskUse = allowDiskUse;
|
||||
this.explain = explain;
|
||||
this.cursor = Optional.ofNullable(cursor);
|
||||
this.collation = Optional.ofNullable(collation);
|
||||
this.comment = Optional.ofNullable(comment);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -127,9 +108,8 @@ public class AggregationOptions {
|
||||
Document cursor = document.get(CURSOR, Document.class);
|
||||
Collation collation = document.containsKey(COLLATION) ? Collation.from(document.get(COLLATION, Document.class))
|
||||
: null;
|
||||
String comment = document.getString(COMMENT);
|
||||
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -196,16 +176,6 @@ public class AggregationOptions {
|
||||
return collation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the comment for the aggregation.
|
||||
*
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<String> getComment() {
|
||||
return comment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
@@ -249,7 +219,6 @@ public class AggregationOptions {
|
||||
|
||||
cursor.ifPresent(val -> document.put(CURSOR, val));
|
||||
collation.ifPresent(val -> document.append(COLLATION, val.toDocument()));
|
||||
comment.ifPresent(val -> document.append(COMMENT, val));
|
||||
|
||||
return document;
|
||||
}
|
||||
@@ -278,7 +247,6 @@ public class AggregationOptions {
|
||||
private boolean explain;
|
||||
private @Nullable Document cursor;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable String comment;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
@@ -334,7 +302,6 @@ public class AggregationOptions {
|
||||
*
|
||||
* @param collation can be {@literal null}.
|
||||
* @return
|
||||
* @since 2.0
|
||||
*/
|
||||
public Builder collation(@Nullable Collation collation) {
|
||||
|
||||
@@ -342,26 +309,13 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a comment to describe the execution.
|
||||
*
|
||||
* @param comment can be {@literal null}.
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
public Builder comment(@Nullable String comment) {
|
||||
|
||||
this.comment = comment;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions} instance with the given configuration.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public AggregationOptions build() {
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
return new AggregationOptions(allowDiskUse, explain, cursor, collation);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -34,7 +33,6 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Shashank Sharma
|
||||
* @since 1.0
|
||||
*/
|
||||
public class ArrayOperators {
|
||||
@@ -59,25 +57,13 @@ public class ArrayOperators {
|
||||
return new ArrayOperatorFactory(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the given {@link Collection values} {@link AggregationExpression}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayOperatorFactory}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ArrayOperatorFactory arrayOf(Collection<?> values) {
|
||||
return new ArrayOperatorFactory(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class ArrayOperatorFactory {
|
||||
|
||||
private final @Nullable String fieldReference;
|
||||
private final @Nullable AggregationExpression expression;
|
||||
private final @Nullable Collection values;
|
||||
private final String fieldReference;
|
||||
private final AggregationExpression expression;
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}.
|
||||
@@ -89,7 +75,6 @@ public class ArrayOperators {
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
this.fieldReference = fieldReference;
|
||||
this.expression = null;
|
||||
this.values = null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -102,21 +87,6 @@ public class ArrayOperators {
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = expression;
|
||||
this.values = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayOperatorFactory} for given values.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ArrayOperatorFactory(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = null;
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -157,12 +127,7 @@ public class ArrayOperators {
|
||||
}
|
||||
|
||||
private ArrayElemAt createArrayElemAt() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ArrayElemAt.arrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ArrayElemAt.arrayOf(expression) : ArrayElemAt.arrayOf(values);
|
||||
return usesFieldRef() ? ArrayElemAt.arrayOf(fieldReference) : ArrayElemAt.arrayOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -192,12 +157,7 @@ public class ArrayOperators {
|
||||
}
|
||||
|
||||
private ConcatArrays createConcatArrays() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ConcatArrays.arrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ConcatArrays.arrayOf(expression) : ConcatArrays.arrayOf(values);
|
||||
return usesFieldRef() ? ConcatArrays.arrayOf(fieldReference) : ConcatArrays.arrayOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -207,13 +167,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public AsBuilder filter() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Filter.filter(fieldReference);
|
||||
}
|
||||
|
||||
Assert.state(values != null, "Values must not be null!");
|
||||
return Filter.filter(new ArrayList<>(values));
|
||||
return Filter.filter(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -222,9 +176,6 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public IsArray isArray() {
|
||||
|
||||
Assert.state(values == null, "Does it make sense to call isArray on an array? Maybe just skip it?");
|
||||
|
||||
return usesFieldRef() ? IsArray.isArray(fieldReference) : IsArray.isArray(expression);
|
||||
}
|
||||
|
||||
@@ -234,12 +185,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Size length() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Size.lengthOfArray(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? Size.lengthOfArray(expression) : Size.lengthOfArray(values);
|
||||
return usesFieldRef() ? Size.lengthOfArray(fieldReference) : Size.lengthOfArray(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -248,12 +194,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Slice slice() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Slice.sliceArrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? Slice.sliceArrayOf(expression) : Slice.sliceArrayOf(values);
|
||||
return usesFieldRef() ? Slice.sliceArrayOf(fieldReference) : Slice.sliceArrayOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -264,13 +205,8 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public IndexOfArray indexOf(Object value) {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return IndexOfArray.arrayOf(fieldReference).indexOf(value);
|
||||
}
|
||||
|
||||
return usesExpression() ? IndexOfArray.arrayOf(expression).indexOf(value)
|
||||
: IndexOfArray.arrayOf(values).indexOf(value);
|
||||
return usesFieldRef() ? IndexOfArray.arrayOf(fieldReference).indexOf(value)
|
||||
: IndexOfArray.arrayOf(expression).indexOf(value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -279,13 +215,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public ReverseArray reverse() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ReverseArray.reverseArrayOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ReverseArray.reverseArrayOf(expression)
|
||||
: ReverseArray.reverseArrayOf(Collections.singletonList(values));
|
||||
return usesFieldRef() ? ReverseArray.reverseArrayOf(fieldReference) : ReverseArray.reverseArrayOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -323,12 +253,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public Zip zipWith(Object... arrays) {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Zip.arrayOf(fieldReference).zip(arrays);
|
||||
}
|
||||
|
||||
return (usesExpression() ? Zip.arrayOf(expression) : Zip.arrayOf(values)).zip(arrays);
|
||||
return (usesFieldRef() ? Zip.arrayOf(fieldReference) : Zip.arrayOf(expression)).zip(arrays);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -339,12 +264,7 @@ public class ArrayOperators {
|
||||
* @return
|
||||
*/
|
||||
public In containsValue(Object value) {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return In.arrayOf(fieldReference).containsValue(value);
|
||||
}
|
||||
|
||||
return (usesExpression() ? In.arrayOf(expression) : In.arrayOf(values)).containsValue(value);
|
||||
return (usesFieldRef() ? In.arrayOf(fieldReference) : In.arrayOf(expression)).containsValue(value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -356,11 +276,8 @@ public class ArrayOperators {
|
||||
*/
|
||||
public ArrayToObject toObject() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return ArrayToObject.arrayValueOfToObject(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values);
|
||||
return usesFieldRef() ? ArrayToObject.arrayValueOfToObject(fieldReference)
|
||||
: ArrayToObject.arrayValueOfToObject(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -377,20 +294,9 @@ public class ArrayOperators {
|
||||
Reduce startingWith(Object initialValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if {@link #fieldReference} is not {@literal null}.
|
||||
*/
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if {@link #expression} is not {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
private boolean usesExpression() {
|
||||
return expression != null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -433,19 +339,6 @@ public class ArrayOperators {
|
||||
return new ArrayElemAt(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ArrayElemAt}.
|
||||
*
|
||||
* @param values The array members. Must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayElemAt}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ArrayElemAt arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new ArrayElemAt(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public ArrayElemAt elementAt(int index) {
|
||||
return new ArrayElemAt(append(index));
|
||||
}
|
||||
@@ -503,19 +396,6 @@ public class ArrayOperators {
|
||||
return new ConcatArrays(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ConcatArrays}.
|
||||
*
|
||||
* @param values The array members. Must not be {@literal null}.
|
||||
* @return new instance of {@link ConcatArrays}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ConcatArrays arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new ConcatArrays(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public ConcatArrays concat(String arrayFieldReference) {
|
||||
|
||||
Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!");
|
||||
@@ -860,19 +740,6 @@ public class ArrayOperators {
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Size(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Size}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Size}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static Size lengthOfArray(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new Size(Collections.singletonList(values));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -915,19 +782,6 @@ public class ArrayOperators {
|
||||
return new Slice(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Slice}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Slice}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static Slice sliceArrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new Slice(Collections.singletonList(values));
|
||||
}
|
||||
|
||||
public Slice itemCount(int nrElements) {
|
||||
return new Slice(append(nrElements));
|
||||
}
|
||||
@@ -998,19 +852,6 @@ public class ArrayOperators {
|
||||
return new IndexOfArrayBuilder(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start creating new {@link IndexOfArray}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link IndexOfArray}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static IndexOfArrayBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
return new IndexOfArrayBuilder(values);
|
||||
}
|
||||
|
||||
public IndexOfArray within(Range<Long> range) {
|
||||
return new IndexOfArray(append(AggregationUtils.toRangeValues(range)));
|
||||
}
|
||||
@@ -1165,17 +1006,6 @@ public class ArrayOperators {
|
||||
public static ReverseArray reverseArrayOf(AggregationExpression expression) {
|
||||
return new ReverseArray(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReverseArray}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link ReverseArray}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ReverseArray reverseArrayOf(Collection<?> values) {
|
||||
return new ReverseArray(values);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1528,19 +1358,6 @@ public class ArrayOperators {
|
||||
return new ZipBuilder(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start creating new {@link Zip}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link Zip}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static ZipBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Expression must not be null!");
|
||||
return new ZipBuilder(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link Zip} and set the {@code useLongestLength} property to {@literal true}.
|
||||
*
|
||||
@@ -1625,10 +1442,6 @@ public class ArrayOperators {
|
||||
* {@link AggregationExpression} for {@code $in}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Shashank Sharma
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/in/">https://docs.mongodb.com/manual/reference/operator/aggregation/in/</a>
|
||||
* @since 2.2
|
||||
*/
|
||||
public static class In extends AbstractAggregationExpression {
|
||||
|
||||
@@ -1647,14 +1460,18 @@ public class ArrayOperators {
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static InBuilder arrayOf(String fieldReference) {
|
||||
public static InBuilder arrayOf(final String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return value -> {
|
||||
return new InBuilder() {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, Fields.field(fieldReference)));
|
||||
@Override
|
||||
public In containsValue(Object value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, Fields.field(fieldReference)));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1664,34 +1481,18 @@ public class ArrayOperators {
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
public static InBuilder arrayOf(AggregationExpression expression) {
|
||||
public static InBuilder arrayOf(final AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return value -> {
|
||||
return new InBuilder() {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
@Override
|
||||
public In containsValue(Object value) {
|
||||
|
||||
return new In(Arrays.asList(value, expression));
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Support for Aggregation In Search an Element in List of Objects to Filter Start creating {@link In}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link InBuilder}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public static InBuilder arrayOf(Collection<?> values) {
|
||||
|
||||
Assert.notNull(values, "Values must not be null!");
|
||||
|
||||
return value -> {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
return new In(Arrays.asList(value, values));
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
return new In(Arrays.asList(value, expression));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -56,11 +56,11 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return rootContext.getMappedObject(document, type);
|
||||
public Document getMappedObject(Document document) {
|
||||
return rootContext.getMappedObject(document);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -69,6 +69,11 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
|
||||
if (field.isInternal()) {
|
||||
return new DirectFieldReference(new ExposedField(field, true));
|
||||
}
|
||||
|
||||
return getReference(field, field.getTarget());
|
||||
}
|
||||
|
||||
@@ -81,15 +86,6 @@ class ExposedFieldsAggregationOperationContext implements AggregationOperationCo
|
||||
return getReference(null, name);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return rootContext.getFields(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link FieldReference} to the given {@link Field} with the given {@code name}.
|
||||
*
|
||||
|
||||
@@ -43,4 +43,12 @@ public interface Field {
|
||||
* @return
|
||||
*/
|
||||
boolean isAliased();
|
||||
|
||||
/**
|
||||
* @return true if the field name references a local value such as {@code $$this}.
|
||||
* @since 2.1.11
|
||||
*/
|
||||
default boolean isInternal() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -112,7 +112,7 @@ public final class Fields implements Iterable<Field> {
|
||||
this.fields = verify(fields);
|
||||
}
|
||||
|
||||
private static List<Field> verify(List<Field> fields) {
|
||||
private static final List<Field> verify(List<Field> fields) {
|
||||
|
||||
Map<String, Field> reference = new HashMap<String, Field>();
|
||||
|
||||
@@ -283,6 +283,11 @@ public final class Fields implements Iterable<Field> {
|
||||
return !getName().equals(getTarget());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isInternal() {
|
||||
return getRaw().endsWith("$$this") || getRaw().endsWith("$$value");
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} in case the field name starts with {@code $$}.
|
||||
* @since 1.10
|
||||
|
||||
@@ -15,15 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -94,15 +89,6 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document command = context.getMappedObject(nearQuery.toDocument());
|
||||
|
||||
if(command.containsKey("query")) {
|
||||
command.replace("query", context.getMappedObject(command.get("query", Document.class)));
|
||||
}
|
||||
|
||||
if(command.containsKey("collation")) {
|
||||
command.remove("collation");
|
||||
}
|
||||
|
||||
command.put("distanceField", distanceField);
|
||||
|
||||
if (StringUtils.hasText(indexKey)) {
|
||||
@@ -111,28 +97,4 @@ public class GeoNearOperation implements AggregationOperation {
|
||||
|
||||
return new Document("$geoNear", command);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toPipelineStages(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public List<Document> toPipelineStages(AggregationOperationContext context) {
|
||||
|
||||
Document command = toDocument(context);
|
||||
Number limit = (Number) command.get("$geoNear", Document.class).remove("num");
|
||||
|
||||
List<Document> stages = new ArrayList<>();
|
||||
stages.add(command);
|
||||
|
||||
if(nearQuery.getSkip() != null && nearQuery.getSkip() > 0){
|
||||
stages.add(new Document("$skip", nearQuery.getSkip()));
|
||||
}
|
||||
|
||||
if(limit != null) {
|
||||
stages.add(new Document("$limit", limit.longValue()));
|
||||
}
|
||||
|
||||
return stages;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -128,13 +128,7 @@ public class GraphLookupOperation implements InheritsFieldsAggregationOperation
|
||||
*/
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
|
||||
List<ExposedField> fields = new ArrayList<>(2);
|
||||
fields.add(new ExposedField(as, true));
|
||||
if(depthField != null) {
|
||||
fields.add(new ExposedField(depthField, true));
|
||||
}
|
||||
return ExposedFields.from(fields.toArray(new ExposedField[0]));
|
||||
return ExposedFields.from(new ExposedField(as, true));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -59,15 +59,6 @@ class NestedDelegatingExpressionAggregationOperationContext implements Aggregati
|
||||
return delegate.getMappedObject(document);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, Class<?> type) {
|
||||
return delegate.getMappedObject(document, type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
@@ -103,13 +94,4 @@ class NestedDelegatingExpressionAggregationOperationContext implements Aggregati
|
||||
public FieldReference getReference(String name) {
|
||||
return new ExpressionFieldReference(delegate.getReference(name));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return delegate.getFields(type);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,11 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code $out}-operation.
|
||||
@@ -30,161 +26,18 @@ import org.springframework.util.StringUtils;
|
||||
*
|
||||
* @author Nikolay Bogdanov
|
||||
* @author Christoph Strobl
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/out/">MongoDB Aggregation Framework:
|
||||
* $out</a>
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/out/">MongoDB Aggregation Framework: $out</a>
|
||||
*/
|
||||
public class OutOperation implements AggregationOperation {
|
||||
|
||||
private final @Nullable String databaseName;
|
||||
private final String collectionName;
|
||||
private final @Nullable Document uniqueKey;
|
||||
private final @Nullable OutMode mode;
|
||||
|
||||
/**
|
||||
* @param outCollectionName Collection name to export the results. Must not be {@literal null}.
|
||||
*/
|
||||
public OutOperation(String outCollectionName) {
|
||||
this(null, outCollectionName, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param databaseName Optional database name the target collection is located in. Can be {@literal null}.
|
||||
* @param collectionName Collection name to export the results. Must not be {@literal null}. Can be {@literal null}.
|
||||
* @param uniqueKey Optional unique key spec identify a document in the to collection for replacement or merge.
|
||||
* @param mode The mode for merging the aggregation pipeline output with the target collection. Can be
|
||||
* {@literal null}. {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
private OutOperation(@Nullable String databaseName, String collectionName, @Nullable Document uniqueKey,
|
||||
@Nullable OutMode mode) {
|
||||
|
||||
Assert.notNull(collectionName, "Collection name must not be null!");
|
||||
|
||||
this.databaseName = databaseName;
|
||||
this.collectionName = collectionName;
|
||||
this.uniqueKey = uniqueKey;
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the database of the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param database can be {@literal null}. Defaulted to aggregation target database.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation in(@Nullable String database) {
|
||||
return new OutOperation(database, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the field that uniquely identifies a document in the target collection. <br />
|
||||
* For convenience the given {@literal key} can either be a single field name or the Json representation of a key
|
||||
* {@link Document}.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1
|
||||
* // }
|
||||
* .uniqueKey("field-1")
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1,
|
||||
* // "field-2" : 1
|
||||
* // }
|
||||
* .uniqueKey("{ 'field-1' : 1, 'field-2' : 1}")
|
||||
* </pre>
|
||||
*
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param key can be {@literal null}. Server uses {@literal _id} when {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation uniqueKey(@Nullable String key) {
|
||||
|
||||
Document uniqueKey = key == null ? null : BsonUtils.toDocumentOrElse(key, it -> new Document(it, 1));
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally specify the fields that uniquely identifies a document in the target collection. <br />
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // {
|
||||
* // "field-1" : 1
|
||||
* // "field-2" : 1
|
||||
* // }
|
||||
* .uniqueKeyOf(Arrays.asList("field-1", "field-2"))
|
||||
* </pre>
|
||||
*
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation uniqueKeyOf(Iterable<String> fields) {
|
||||
|
||||
Assert.notNull(fields, "Fields must not be null!");
|
||||
|
||||
Document uniqueKey = new Document();
|
||||
fields.forEach(it -> uniqueKey.append(it, 1));
|
||||
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify how to merge the aggregation output with the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @param mode must not be {@literal null}.
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation mode(OutMode mode) {
|
||||
|
||||
Assert.notNull(mode, "Mode must not be null!");
|
||||
return new OutOperation(databaseName, collectionName, uniqueKey, mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#REPLACE_COLLECTION
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation replaceCollection() {
|
||||
return mode(OutMode.REPLACE_COLLECTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace/Upsert documents in the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#REPLACE
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation replaceDocuments() {
|
||||
return mode(OutMode.REPLACE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert documents to the target collection. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.2 or later.
|
||||
*
|
||||
* @return new instance of {@link OutOperation}.
|
||||
* @see OutMode#INSERT
|
||||
* @since 2.2
|
||||
*/
|
||||
public OutOperation insertDocuments() {
|
||||
return mode(OutMode.INSERT);
|
||||
Assert.notNull(outCollectionName, "Collection name must not be null!");
|
||||
this.collectionName = outCollectionName;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -193,62 +46,6 @@ public class OutOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
if (!requiresMongoDb42Format()) {
|
||||
return new Document("$out", collectionName);
|
||||
}
|
||||
|
||||
Assert.state(mode != null, "Mode must not be null!");
|
||||
|
||||
Document $out = new Document("to", collectionName) //
|
||||
.append("mode", mode.getMongoMode());
|
||||
|
||||
if (StringUtils.hasText(databaseName)) {
|
||||
$out.append("db", databaseName);
|
||||
}
|
||||
|
||||
if (uniqueKey != null) {
|
||||
$out.append("uniqueKey", uniqueKey);
|
||||
}
|
||||
|
||||
return new Document("$out", $out);
|
||||
}
|
||||
|
||||
private boolean requiresMongoDb42Format() {
|
||||
return StringUtils.hasText(databaseName) || mode != null || uniqueKey != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* The mode for merging the aggregation pipeline output.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public enum OutMode {
|
||||
|
||||
/**
|
||||
* Write documents to the target collection. Errors if a document same uniqueKey already exists.
|
||||
*/
|
||||
INSERT("insertDocuments"),
|
||||
|
||||
/**
|
||||
* Update on any document in the target collection with the same uniqueKey.
|
||||
*/
|
||||
REPLACE("replaceDocuments"),
|
||||
|
||||
/**
|
||||
* Replaces the to collection with the output from the aggregation pipeline. Cannot be in a different database.
|
||||
*/
|
||||
REPLACE_COLLECTION("replaceCollection");
|
||||
|
||||
private String mode;
|
||||
|
||||
OutMode(String mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
public String getMongoMode() {
|
||||
return mode;
|
||||
}
|
||||
return new Document("$out", collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ import java.util.Set;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* {@link AggregationOperationContext} implementation prefixing non-command keys on root level with the given prefix.
|
||||
@@ -64,15 +63,6 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati
|
||||
return doPrefix(delegate.getMappedObject(document));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return doPrefix(delegate.getMappedObject(document, type));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.Field)
|
||||
@@ -91,15 +81,6 @@ public class PrefixingDelegatingAggregationOperationContext implements Aggregati
|
||||
return delegate.getReference(name);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
return delegate.getFields(type);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Document doPrefix(Document source) {
|
||||
|
||||
|
||||
@@ -20,7 +20,6 @@ import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond;
|
||||
@@ -74,16 +73,6 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
this(NONE, ProjectionOperationBuilder.FieldProjection.from(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ProjectionOperation} including all top level fields of the given {@link Class type}.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ProjectionOperation(Class<?> type) {
|
||||
this(NONE, Collections.singletonList(new TypeProjection(type)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor to allow building up {@link ProjectionOperation} instances from already existing
|
||||
* {@link Projection}s.
|
||||
@@ -177,48 +166,6 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
return new ProjectionOperation(this.projections, FieldProjection.from(fields, true));
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the current {@link ProjectionOperation} as an array with given name. <br />
|
||||
* If you want to specify array values directly use {@link #andArrayOf(Object...)}.
|
||||
*
|
||||
* @param name the target property name.
|
||||
* @return new instance of {@link ProjectionOperation}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ProjectionOperation asArray(String name) {
|
||||
|
||||
return new ProjectionOperation(Collections.emptyList(),
|
||||
Collections.singletonList(new ArrayProjection(Fields.field(name), (List) this.projections)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Includes the given values ({@link Field field references}, {@link AggregationExpression expression}, plain values)
|
||||
* as an array. <br />
|
||||
* The target property name needs to be set via {@link ArrayProjectionOperationBuilder#as(String)}.
|
||||
*
|
||||
* @param values must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayProjectionOperationBuilder}.
|
||||
* @throws IllegalArgumentException if the required argument it {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder andArrayOf(Object... values) {
|
||||
|
||||
ArrayProjectionOperationBuilder builder = new ArrayProjectionOperationBuilder(this);
|
||||
|
||||
for (Object value : values) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
builder.and((Field) value);
|
||||
} else if (value instanceof AggregationExpression) {
|
||||
builder.and((AggregationExpression) value);
|
||||
} else {
|
||||
builder.and(value);
|
||||
}
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields()
|
||||
@@ -1548,8 +1495,7 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
final Field aliasedField = Fields.field(alias, this.field.getName());
|
||||
return new OperationProjection(aliasedField, operation, values.toArray()) {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder.OperationProjection#getField()
|
||||
*/
|
||||
@Override
|
||||
@@ -1749,164 +1695,9 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document(field.getName(), expression.toDocument(context));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Projection} including all top level fields of the given target type mapped to include potentially
|
||||
* deviating field names.
|
||||
*
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class TypeProjection extends Projection {
|
||||
|
||||
private final Class<?> type;
|
||||
|
||||
TypeProjection(Class<?> type) {
|
||||
|
||||
super(Fields.field(type.getSimpleName()));
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document projections = new Document();
|
||||
|
||||
Fields fields = context.getFields(type);
|
||||
fields.forEach(it -> projections.append(it.getName(), 1));
|
||||
return context.getMappedObject(projections, type);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@code array} projections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public static class ArrayProjectionOperationBuilder {
|
||||
|
||||
private ProjectionOperation target;
|
||||
private final List<Object> projections;
|
||||
|
||||
public ArrayProjectionOperationBuilder(ProjectionOperation target) {
|
||||
|
||||
this.target = target;
|
||||
this.projections = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param expression
|
||||
* @return
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression must not be null!");
|
||||
|
||||
this.projections.add(expression);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param field
|
||||
* @return
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(Field field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
this.projections.add(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ArrayProjectionOperationBuilder} with the current {@link Projection}s and the given one.
|
||||
*
|
||||
* @param value
|
||||
* @return
|
||||
*/
|
||||
public ArrayProjectionOperationBuilder and(Object value) {
|
||||
|
||||
this.projections.add(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the {@link ProjectionOperation} for the array property with given {@literal name}.
|
||||
*
|
||||
* @param name The target property name. Must not be {@literal null}.
|
||||
* @return new instance of {@link ArrayProjectionOperationBuilder}.
|
||||
*/
|
||||
public ProjectionOperation as(String name) {
|
||||
|
||||
return new ProjectionOperation(target.projections,
|
||||
Collections.singletonList(new ArrayProjection(Fields.field(name), this.projections)));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
static class ArrayProjection extends Projection {
|
||||
|
||||
private final Field targetField;
|
||||
private final List<Object> projections;
|
||||
|
||||
public ArrayProjection(Field targetField, List<Object> projections) {
|
||||
|
||||
super(targetField);
|
||||
this.targetField = targetField;
|
||||
this.projections = projections;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext)
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
return new Document(targetField.getName(),
|
||||
projections.stream().map(it -> toArrayEntry(it, context)).collect(Collectors.toList()));
|
||||
}
|
||||
|
||||
private Object toArrayEntry(Object projection, AggregationOperationContext ctx) {
|
||||
|
||||
if (projection instanceof Field) {
|
||||
return ctx.getReference((Field) projection).toString();
|
||||
}
|
||||
|
||||
if (projection instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) projection).toDocument(ctx);
|
||||
}
|
||||
|
||||
if (projection instanceof FieldProjection) {
|
||||
return ctx.getReference(((FieldProjection) projection).getExposedField().getTarget()).toString();
|
||||
}
|
||||
|
||||
if (projection instanceof Projection) {
|
||||
((Projection) projection).toDocument(ctx);
|
||||
}
|
||||
|
||||
return projection;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,12 +17,9 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import static org.springframework.data.mongodb.core.aggregation.Fields.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
@@ -30,7 +27,6 @@ import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldRefe
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -74,16 +70,7 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document) {
|
||||
return getMappedObject(document, type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document, java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Document getMappedObject(Document document, @Nullable Class<?> type) {
|
||||
return mapper.getMappedObject(document, type != null ? mappingContext.getPersistentEntity(type) : null);
|
||||
return mapper.getMappedObject(document, mappingContext.getPersistentEntity(type));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -92,6 +79,8 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
*/
|
||||
@Override
|
||||
public FieldReference getReference(Field field) {
|
||||
|
||||
PropertyPath.from(field.getTarget(), type);
|
||||
return getReferenceFor(field);
|
||||
}
|
||||
|
||||
@@ -104,30 +93,6 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return getReferenceFor(field(name));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getFields(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Fields getFields(Class<?> type) {
|
||||
|
||||
Assert.notNull(type, "Type must not be null!");
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(type);
|
||||
|
||||
if (entity == null) {
|
||||
return AggregationOperationContext.super.getFields(type);
|
||||
}
|
||||
|
||||
List<String> fields = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty property : entity) {
|
||||
fields.add(property.getName());
|
||||
}
|
||||
|
||||
return Fields.fields(fields.toArray(new String[0]));
|
||||
}
|
||||
|
||||
private FieldReference getReferenceFor(Field field) {
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext
|
||||
|
||||
@@ -16,15 +16,12 @@
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.Date;
|
||||
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.beans.factory.InitializingBean;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.core.convert.support.GenericConversionService;
|
||||
import org.springframework.data.convert.ConverterBuilder;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverters.BigIntegerToObjectIdConverter;
|
||||
@@ -96,24 +93,6 @@ public abstract class AbstractMongoConverter implements MongoConverter, Initiali
|
||||
conversionService.addConverter(BigIntegerToObjectIdConverter.INSTANCE);
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(Date.class, Long.class)) {
|
||||
conversionService
|
||||
.addConverter(ConverterBuilder.writing(Date.class, Long.class, Date::getTime).getWritingConverter());
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(Long.class, Date.class)) {
|
||||
conversionService.addConverter(ConverterBuilder.reading(Long.class, Date.class, Date::new).getReadingConverter());
|
||||
}
|
||||
|
||||
if (!conversionService.canConvert(ObjectId.class, Date.class)) {
|
||||
|
||||
conversionService.addConverter(
|
||||
ConverterBuilder.reading(ObjectId.class, Date.class, objectId -> new Date(objectId.getTimestamp()))
|
||||
.getReadingConverter());
|
||||
}
|
||||
|
||||
conversionService
|
||||
.addConverter(ConverterBuilder.reading(Code.class, String.class, Code::getCode).getReadingConverter());
|
||||
conversions.registerConvertersIn(conversionService);
|
||||
}
|
||||
|
||||
|
||||
@@ -15,16 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.UnaryOperator;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.DefaultTypeMapper;
|
||||
import org.springframework.data.convert.SimpleTypeInformationMapper;
|
||||
import org.springframework.data.convert.TypeAliasAccessor;
|
||||
@@ -61,58 +58,21 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M
|
||||
|
||||
private final TypeAliasAccessor<Bson> accessor;
|
||||
private final @Nullable String typeKey;
|
||||
private UnaryOperator<Class<?>> writeTarget = UnaryOperator.identity();
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code _class}.
|
||||
*/
|
||||
public DefaultMongoTypeMapper() {
|
||||
this(DEFAULT_TYPE_KEY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}.
|
||||
*
|
||||
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
|
||||
*/
|
||||
public DefaultMongoTypeMapper(@Nullable String typeKey) {
|
||||
this(typeKey, Collections.singletonList(new SimpleTypeInformationMapper()));
|
||||
this(typeKey, Arrays.asList(new SimpleTypeInformationMapper()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}.
|
||||
*
|
||||
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
|
||||
* @param mappingContext the mapping context.
|
||||
*/
|
||||
public DefaultMongoTypeMapper(@Nullable String typeKey,
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext) {
|
||||
this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext,
|
||||
Collections.singletonList(new SimpleTypeInformationMapper()));
|
||||
Arrays.asList(new SimpleTypeInformationMapper()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses
|
||||
* {@link UnaryOperator} to apply {@link CustomConversions}.
|
||||
*
|
||||
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
|
||||
* @param mappingContext the mapping context to look up types using type hints.
|
||||
* @see MappingMongoConverter#getWriteTarget(Class)
|
||||
*/
|
||||
public DefaultMongoTypeMapper(@Nullable String typeKey,
|
||||
MappingContext<? extends PersistentEntity<?, ?>, ?> mappingContext, UnaryOperator<Class<?>> writeTarget) {
|
||||
this(typeKey, new DocumentTypeAliasAccessor(typeKey), mappingContext,
|
||||
Collections.singletonList(new SimpleTypeInformationMapper()));
|
||||
this.writeTarget = writeTarget;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTypeMapper} with fully-qualified type hints using {@code typeKey}. Uses
|
||||
* {@link TypeInformationMapper} to map type hints.
|
||||
*
|
||||
* @param typeKey name of the field to read and write type hints. Can be {@literal null} to disable type hints.
|
||||
* @param mappers
|
||||
*/
|
||||
public DefaultMongoTypeMapper(@Nullable String typeKey, List<? extends TypeInformationMapper> mappers) {
|
||||
this(typeKey, new DocumentTypeAliasAccessor(typeKey), null, mappers);
|
||||
}
|
||||
@@ -160,15 +120,6 @@ public class DefaultMongoTypeMapper extends DefaultTypeMapper<Bson> implements M
|
||||
accessor.writeTypeTo(result, new Document("$in", restrictedMappedTypes));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.MongoTypeMapper#getWriteTargetTypeFor(java.lang.Class)
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getWriteTargetTypeFor(Class<?> source) {
|
||||
return writeTarget.apply(source);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.convert.DefaultTypeMapper#getFallbackTypeFor(java.lang.Object)
|
||||
|
||||
@@ -15,33 +15,22 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityInstantiator;
|
||||
import org.springframework.data.convert.EntityInstantiators;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
@@ -122,8 +111,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
this.mappingContext = mappingContext;
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this::getWriteTarget);
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE);
|
||||
@@ -223,11 +211,20 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return conversionService.convert(bson, rawType);
|
||||
}
|
||||
|
||||
if (DBObject.class.isAssignableFrom(rawType)) {
|
||||
if (Document.class.isAssignableFrom(rawType)) {
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
if (Document.class.isAssignableFrom(rawType)) {
|
||||
if (DBObject.class.isAssignableFrom(rawType)) {
|
||||
|
||||
if (bson instanceof DBObject) {
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
if (bson instanceof Document) {
|
||||
return (S) new BasicDBObject((Document) bson);
|
||||
}
|
||||
|
||||
return (S) bson;
|
||||
}
|
||||
|
||||
@@ -528,7 +525,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (idProperty != null && !dbObjectAccessor.hasValue(idProperty)) {
|
||||
|
||||
Object value = idMapper.convertId(accessor.getProperty(idProperty), idProperty.getFieldType());
|
||||
Object value = idMapper.convertId(accessor.getProperty(idProperty));
|
||||
|
||||
if (value != null) {
|
||||
dbObjectAccessor.put(idProperty, value);
|
||||
@@ -632,7 +629,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = valueType.isSubTypeOf(prop.getType())
|
||||
MongoPersistentEntity<?> entity = isSubTypeOf(obj.getClass(), prop.getType())
|
||||
? mappingContext.getRequiredPersistentEntity(obj.getClass())
|
||||
: mappingContext.getRequiredPersistentEntity(type);
|
||||
|
||||
@@ -671,10 +668,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
protected List<Object> createCollection(Collection<?> collection, MongoPersistentProperty property) {
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
|
||||
if (property.hasExplicitWriteTarget()) {
|
||||
return writeCollectionInternal(collection, new TypeInformationWrapper<>(property), new ArrayList<>());
|
||||
}
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
|
||||
}
|
||||
|
||||
@@ -754,8 +747,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Class<?> elementType = element == null ? null : element.getClass();
|
||||
|
||||
if (elementType == null || conversions.isSimpleType(elementType)) {
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element,
|
||||
componentType != null ? componentType.getType() : Object.class));
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element));
|
||||
} else if (element instanceof Collection || elementType.isArray()) {
|
||||
collection.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList()));
|
||||
} else {
|
||||
@@ -858,7 +850,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
return conversions.hasCustomWriteTarget(key.getClass(), String.class)
|
||||
? (String) getPotentiallyConvertedSimpleWrite(key, Object.class)
|
||||
? (String) getPotentiallyConvertedSimpleWrite(key)
|
||||
: key.toString();
|
||||
}
|
||||
|
||||
@@ -900,13 +892,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param key must not be {@literal null}.
|
||||
*/
|
||||
private void writeSimpleInternal(Object value, Bson bson, String key) {
|
||||
addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class));
|
||||
addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) {
|
||||
DocumentAccessor accessor = new DocumentAccessor(bson);
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value,
|
||||
property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class));
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -917,19 +908,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class<?> typeHint) {
|
||||
private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value) {
|
||||
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeHint != null && Object.class != typeHint) {
|
||||
|
||||
if (conversionService.canConvert(value.getClass(), typeHint)) {
|
||||
value = conversionService.convert(value, typeHint);
|
||||
}
|
||||
}
|
||||
|
||||
Optional<Class<?>> customTarget = conversions.getCustomWriteTarget(value.getClass());
|
||||
|
||||
if (customTarget.isPresent()) {
|
||||
@@ -1002,8 +986,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
throw new MappingException("Cannot create a reference to an object with a NULL id.");
|
||||
}
|
||||
|
||||
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity,
|
||||
idMapper.convertId(id, idProperty != null ? idProperty.getFieldType() : ObjectId.class));
|
||||
return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), entity, idMapper.convertId(id));
|
||||
}
|
||||
|
||||
throw new MappingException("No id property found on class " + entity.getType());
|
||||
@@ -1033,8 +1016,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(targetType, "Target type must not be null!");
|
||||
Assert.notNull(path, "Object path must not be null!");
|
||||
|
||||
Class<?> collectionType = targetType.isSubTypeOf(Collection.class) //
|
||||
? targetType.getType() //
|
||||
Class<?> collectionType = targetType.getType();
|
||||
collectionType = isSubTypeOf(collectionType, Collection.class) //
|
||||
? collectionType //
|
||||
: List.class;
|
||||
|
||||
TypeInformation<?> componentType = targetType.getComponentType() != null //
|
||||
@@ -1228,8 +1212,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (conversions.isSimpleType(obj.getClass())) {
|
||||
// Doesn't need conversion
|
||||
return getPotentiallyConvertedSimpleWrite(obj,
|
||||
typeInformation != null ? typeInformation.getType() : Object.class);
|
||||
return getPotentiallyConvertedSimpleWrite(obj);
|
||||
}
|
||||
|
||||
if (obj instanceof List) {
|
||||
@@ -1609,7 +1592,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param ref
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
Document readRef(DBRef ref) {
|
||||
return dbRefResolver.fetch(ref);
|
||||
}
|
||||
@@ -1625,16 +1607,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return dbRefResolver.bulkFetch(references);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the conversion target type if defined or return the {@literal source}.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
public Class<?> getWriteTarget(Class<?> source) {
|
||||
return conversions.getCustomWriteTarget(source).orElse(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MappingMongoConverter} using the given {@link MongoDbFactory} when loading {@link DBRef}.
|
||||
@@ -1683,6 +1655,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given type is a sub type of the given reference, i.e. assignable but not the exact same type.
|
||||
*
|
||||
* @param type must not be {@literal null}.
|
||||
* @param reference must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
private static boolean isSubTypeOf(Class<?> type, Class<?> reference) {
|
||||
return !type.equals(reference) && reference.isAssignableFrom(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker class used to indicate we have a non root document object here that might be used within an update - so we
|
||||
* need to preserve type hints for potential nested elements but need to remove it on top level.
|
||||
@@ -1703,91 +1686,4 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static class TypeInformationWrapper<S> implements TypeInformation<S> {
|
||||
|
||||
private MongoPersistentProperty persistentProperty;
|
||||
private TypeInformation<?> delegate;
|
||||
|
||||
public TypeInformationWrapper(MongoPersistentProperty property) {
|
||||
|
||||
this.persistentProperty = property;
|
||||
this.delegate = property.getTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getParameterTypes(Constructor constructor) {
|
||||
return persistentProperty.getTypeInformation().getParameterTypes(constructor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getProperty(String property) {
|
||||
return delegate.getProperty(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCollectionLike() {
|
||||
return delegate.isCollectionLike();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getComponentType() {
|
||||
return ClassTypeInformation.from(persistentProperty.getFieldType());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isMap() {
|
||||
return delegate.isMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getMapValueType() {
|
||||
return ClassTypeInformation.from(persistentProperty.getFieldType());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getType() {
|
||||
return delegate.getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClassTypeInformation<?> getRawTypeInformation() {
|
||||
return delegate.getRawTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getActualType() {
|
||||
return delegate.getActualType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getReturnType(Method method) {
|
||||
return delegate.getReturnType(method);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getParameterTypes(Method method) {
|
||||
return delegate.getParameterTypes(method);
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation<?> getSuperTypeInformation(Class superType) {
|
||||
return delegate.getSuperTypeInformation(superType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAssignableFrom(org.springframework.data.util.TypeInformation target) {
|
||||
return delegate.isAssignableFrom(target);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<org.springframework.data.util.TypeInformation<?>> getTypeArguments() {
|
||||
return delegate.getTypeArguments();
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) {
|
||||
return delegate.specialize(type);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,6 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.data.convert.EntityConverter;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
@@ -85,18 +83,7 @@ public interface MongoConverter
|
||||
|
||||
if (sourceDocument.containsKey("$ref") && sourceDocument.containsKey("$id")) {
|
||||
|
||||
Object id = sourceDocument.get("$id");
|
||||
String collection = sourceDocument.getString("$ref");
|
||||
|
||||
MongoPersistentEntity<?> entity = getMappingContext().getPersistentEntity(targetType);
|
||||
if (entity != null && entity.hasIdProperty()) {
|
||||
id = convertId(id, entity.getIdProperty().getFieldType());
|
||||
}
|
||||
|
||||
DBRef ref = sourceDocument.containsKey("$db") ? new DBRef(sourceDocument.getString("$db"), collection, id)
|
||||
: new DBRef(collection, id);
|
||||
|
||||
sourceDocument = dbRefResolver.fetch(ref);
|
||||
sourceDocument = dbRefResolver.fetch(new DBRef(sourceDocument.getString("$ref"), sourceDocument.get("$id")));
|
||||
if (sourceDocument == null) {
|
||||
return null;
|
||||
}
|
||||
@@ -115,38 +102,4 @@ public interface MongoConverter
|
||||
}
|
||||
return getConversionService().convert(source, targetType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given raw id value into either {@link ObjectId} or {@link String}.
|
||||
*
|
||||
* @param id
|
||||
* @param targetType
|
||||
* @return {@literal null} if source {@literal id} is already {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
default Object convertId(@Nullable Object id, Class<?> targetType) {
|
||||
|
||||
if (id == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignable(ObjectId.class, targetType)) {
|
||||
|
||||
if (id instanceof String) {
|
||||
|
||||
if (ObjectId.isValid(id.toString())) {
|
||||
return new ObjectId(id.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return getConversionService().canConvert(id.getClass(), targetType)
|
||||
? getConversionService().convert(id, targetType)
|
||||
: convertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return convertToMongoType(id, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,12 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.data.convert.ConverterBuilder.*;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URI;
|
||||
import java.net.URL;
|
||||
import java.time.Instant;
|
||||
import java.util.ArrayList;
|
||||
@@ -34,7 +31,6 @@ import org.bson.BsonTimestamp;
|
||||
import org.bson.Document;
|
||||
import org.bson.types.Binary;
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.Decimal128;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionFailedException;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
@@ -76,9 +72,7 @@ abstract class MongoConverters {
|
||||
List<Object> converters = new ArrayList<>();
|
||||
|
||||
converters.add(BigDecimalToStringConverter.INSTANCE);
|
||||
converters.add(BigDecimalToDecimal128Converter.INSTANCE);
|
||||
converters.add(StringToBigDecimalConverter.INSTANCE);
|
||||
converters.add(Decimal128ToBigDecimalConverter.INSTANCE);
|
||||
converters.add(BigIntegerToStringConverter.INSTANCE);
|
||||
converters.add(StringToBigIntegerConverter.INSTANCE);
|
||||
converters.add(URLToStringConverter.INSTANCE);
|
||||
@@ -96,8 +90,6 @@ abstract class MongoConverters {
|
||||
converters.add(BinaryToByteArrayConverter.INSTANCE);
|
||||
converters.add(BsonTimestampToInstantConverter.INSTANCE);
|
||||
|
||||
converters.add(reading(String.class, URI.class, URI::create).andWriting(URI::toString));
|
||||
|
||||
return converters;
|
||||
}
|
||||
|
||||
@@ -161,17 +153,6 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @since 2.2
|
||||
*/
|
||||
enum BigDecimalToDecimal128Converter implements Converter<BigDecimal, Decimal128> {
|
||||
INSTANCE;
|
||||
|
||||
public Decimal128 convert(BigDecimal source) {
|
||||
return source == null ? null : new Decimal128(source);
|
||||
}
|
||||
}
|
||||
|
||||
enum StringToBigDecimalConverter implements Converter<String, BigDecimal> {
|
||||
INSTANCE;
|
||||
|
||||
@@ -180,17 +161,6 @@ abstract class MongoConverters {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @since 2.2
|
||||
*/
|
||||
enum Decimal128ToBigDecimalConverter implements Converter<Decimal128, BigDecimal> {
|
||||
INSTANCE;
|
||||
|
||||
public BigDecimal convert(Decimal128 source) {
|
||||
return source.bigDecimalValue();
|
||||
}
|
||||
}
|
||||
|
||||
enum BigIntegerToStringConverter implements Converter<BigInteger, String> {
|
||||
INSTANCE;
|
||||
|
||||
|
||||
@@ -46,15 +46,4 @@ public interface MongoTypeMapper extends TypeMapper<Bson> {
|
||||
* @param restrictedTypes must not be {@literal null}
|
||||
*/
|
||||
void writeTypeRestrictions(Document result, Set<Class<?>> restrictedTypes);
|
||||
|
||||
/**
|
||||
* Compute the target type for a given source considering {@link org.springframework.data.convert.CustomConversions}.
|
||||
*
|
||||
* @param source the source type.
|
||||
* @return never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
default Class<?> getWriteTargetTypeFor(Class<?> source) {
|
||||
return source;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,6 @@ import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
@@ -34,6 +33,7 @@ import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -57,7 +57,6 @@ import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.BasicDBList;
|
||||
import com.mongodb.BasicDBObject;
|
||||
@@ -255,16 +254,7 @@ public class QueryMapper {
|
||||
*/
|
||||
protected Field createPropertyField(@Nullable MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
if (entity == null) {
|
||||
return new Field(key);
|
||||
}
|
||||
|
||||
if (Field.ID_KEY.equals(key)) {
|
||||
return new MetadataBackedField(key, entity, mappingContext, entity.getIdProperty());
|
||||
}
|
||||
|
||||
return new MetadataBackedField(key, entity, mappingContext);
|
||||
return entity == null ? new Field(key) : new MetadataBackedField(key, entity, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -336,12 +326,6 @@ public class QueryMapper {
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Object getMappedValue(Field documentField, Object value) {
|
||||
|
||||
if (documentField.getProperty() != null && documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
if (conversionService.canConvert(value.getClass(), documentField.getProperty().getFieldType())) {
|
||||
value = conversionService.convert(value, documentField.getProperty().getFieldType());
|
||||
}
|
||||
}
|
||||
|
||||
if (documentField.isIdField() && !documentField.isAssociation()) {
|
||||
|
||||
if (isDBObject(value)) {
|
||||
@@ -352,11 +336,11 @@ public class QueryMapper {
|
||||
String inKey = valueDbo.containsField("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
resultDbo.put(inKey, ids);
|
||||
} else if (valueDbo.containsField("$ne")) {
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne"), getIdTypeForField(documentField)));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject(resultDbo, Optional.empty());
|
||||
}
|
||||
@@ -371,18 +355,18 @@ public class QueryMapper {
|
||||
String inKey = valueDbo.containsKey("$in") ? "$in" : "$nin";
|
||||
List<Object> ids = new ArrayList<>();
|
||||
for (Object id : (Iterable<?>) valueDbo.get(inKey)) {
|
||||
ids.add(convertId(id, getIdTypeForField(documentField)));
|
||||
ids.add(convertId(id));
|
||||
}
|
||||
resultDbo.put(inKey, ids);
|
||||
} else if (valueDbo.containsKey("$ne")) {
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne"), getIdTypeForField(documentField)));
|
||||
resultDbo.put("$ne", convertId(valueDbo.get("$ne")));
|
||||
} else {
|
||||
return getMappedObject(resultDbo, Optional.empty());
|
||||
}
|
||||
return resultDbo;
|
||||
|
||||
} else {
|
||||
return convertId(value, getIdTypeForField(documentField));
|
||||
return convertId(value);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -397,14 +381,6 @@ public class QueryMapper {
|
||||
return convertSimpleOrDocument(value, documentField.getPropertyEntity());
|
||||
}
|
||||
|
||||
private boolean isIdField(Field documentField) {
|
||||
return documentField.getProperty() != null && documentField.getProperty().isIdProperty();
|
||||
}
|
||||
|
||||
private Class<?> getIdTypeForField(Field documentField) {
|
||||
return isIdField(documentField) ? documentField.getProperty().getFieldType() : ObjectId.class;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link Field} represents an association reference that together with the given value
|
||||
* requires conversion to a {@link org.springframework.data.mongodb.core.mapping.DBRef} object. We check whether the
|
||||
@@ -529,14 +505,7 @@ public class QueryMapper {
|
||||
if (source instanceof DBRef) {
|
||||
|
||||
DBRef ref = (DBRef) source;
|
||||
Object id = convertId(ref.getId(),
|
||||
property != null && property.isIdProperty() ? property.getFieldType() : ObjectId.class);
|
||||
|
||||
if (StringUtils.hasText(ref.getDatabaseName())) {
|
||||
return new DBRef(ref.getDatabaseName(), ref.getCollectionName(), id);
|
||||
} else {
|
||||
return new DBRef(ref.getCollectionName(), id);
|
||||
}
|
||||
return new DBRef(ref.getCollectionName(), convertId(ref.getId()));
|
||||
}
|
||||
|
||||
if (source instanceof Iterable) {
|
||||
@@ -617,24 +586,24 @@ public class QueryMapper {
|
||||
*
|
||||
* @param id
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public Object convertId(@Nullable Object id) {
|
||||
return convertId(id, ObjectId.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given raw id value into either {@link ObjectId} or {@link Class targetType}.
|
||||
*
|
||||
* @param id can be {@literal null}.
|
||||
* @param targetType
|
||||
* @return the converted {@literal id} or {@literal null} if the source was already {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
public Object convertId(@Nullable Object id, Class<?> targetType) {
|
||||
return converter.convertId(id, targetType);
|
||||
if (id == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (id instanceof String) {
|
||||
return ObjectId.isValid(id.toString()) ? conversionService.convert(id, ObjectId.class) : id;
|
||||
}
|
||||
|
||||
try {
|
||||
return conversionService.canConvert(id.getClass(), ObjectId.class) ? conversionService.convert(id, ObjectId.class)
|
||||
: delegateConvertToMongoType(id, null);
|
||||
} catch (ConversionException o_O) {
|
||||
return delegateConvertToMongoType(id, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -782,8 +751,6 @@ public class QueryMapper {
|
||||
*/
|
||||
protected static class Field {
|
||||
|
||||
protected static final Pattern POSITIONAL_OPERATOR = Pattern.compile("\\$\\[.*\\]");
|
||||
|
||||
private static final String ID_KEY = "_id";
|
||||
|
||||
protected final String name;
|
||||
@@ -1052,8 +1019,7 @@ public class QueryMapper {
|
||||
@Nullable
|
||||
private PersistentPropertyPath<MongoPersistentProperty> getPath(String pathExpression) {
|
||||
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "") //
|
||||
.replaceAll(POSITIONAL_OPERATOR.pattern(), "");
|
||||
String rawPath = pathExpression.replaceAll("\\.\\d+", "");
|
||||
|
||||
PropertyPath path = forName(rawPath);
|
||||
if (path == null || isPathToJavaLangClassProperty(path)) {
|
||||
@@ -1099,6 +1065,11 @@ public class QueryMapper {
|
||||
private PropertyPath forName(String path) {
|
||||
|
||||
try {
|
||||
|
||||
if (entity.getPersistentProperty(path) != null) {
|
||||
return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation());
|
||||
}
|
||||
|
||||
return PropertyPath.from(path, entity.getTypeInformation());
|
||||
} catch (PropertyReferenceException | InvalidPersistentPropertyPath e) {
|
||||
|
||||
@@ -1228,11 +1199,6 @@ public class QueryMapper {
|
||||
return true;
|
||||
}
|
||||
|
||||
Matcher matcher = POSITIONAL_OPERATOR.matcher(partial);
|
||||
if (matcher.find()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
Long.valueOf(partial);
|
||||
return true;
|
||||
|
||||
@@ -289,7 +289,7 @@ public class UpdateMapper extends QueryMapper {
|
||||
public MetadataBackedUpdateField(MongoPersistentEntity<?> entity, String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
super(key.replaceAll("\\.\\$(\\[.*\\])?", ""), entity, mappingContext);
|
||||
super(key.replaceAll("\\.\\$", ""), entity, mappingContext);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
|
||||
@@ -17,29 +17,12 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Repeatable;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Mark a class to use compound indexes.
|
||||
* <p />
|
||||
* <p>
|
||||
* <b>NOTE: This annotation is repeatable according to Java 8 conventions using {@link CompoundIndexes#value()} as
|
||||
* container.</b>
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* @CompoundIndex(def = "{'firstname': 1, 'lastname': 1}")
|
||||
* @CompoundIndex(def = "{'address.city': 1, 'address.street': 1}")
|
||||
* class Person {
|
||||
* String firstname;
|
||||
* String lastname;
|
||||
*
|
||||
* Address address;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
@@ -49,31 +32,14 @@ import java.lang.annotation.Target;
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@Repeatable(CompoundIndexes.class)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* The actual index definition in JSON format or a {@link org.springframework.expression.spel.standard.SpelExpression
|
||||
* template expression} resolving to either a JSON String or a {@link org.bson.Document}. The keys of the JSON
|
||||
* document are the fields to be indexed, the values define the index direction (1 for ascending, -1 for descending).
|
||||
* <br />
|
||||
* The actual index definition in JSON format. The keys of the JSON document are the fields to be indexed, the values
|
||||
* define the index direction (1 for ascending, -1 for descending). <br />
|
||||
* If left empty on nested document, the whole document will be indexed.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* @CompoundIndex(def = "{'h1': 1, 'h2': 1}")
|
||||
* class JsonStringIndexDefinition {
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(def = "#{T(org.bson.Document).parse("{ 'h1': 1, 'h2': 1 }")}")
|
||||
* class ExpressionIndexDefinition {
|
||||
* String h1, h2;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
String def() default "";
|
||||
@@ -89,8 +55,7 @@ public @interface CompoundIndex {
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
@@ -98,54 +63,55 @@ public @interface CompoundIndex {
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @deprecated since 2.1. No longer supported by MongoDB as of server version 3.0.
|
||||
*/
|
||||
@Deprecated
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name of the index to be created either as plain value or as
|
||||
* {@link org.springframework.expression.spel.standard.SpelExpression template expression}. <br />
|
||||
* The name of the index to be created. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
* The structure below
|
||||
*
|
||||
* <pre class="code">
|
||||
* <pre>
|
||||
* <code>
|
||||
* @Document
|
||||
* class Root {
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* Hybrid hybrid;
|
||||
* Nested nested;
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* @CompoundIndex(name = "compound_index", def = "{'h1': 1, 'h2': 1}")
|
||||
* class Hybrid {
|
||||
* String h1, h2;
|
||||
* String h1, h2;
|
||||
* }
|
||||
*
|
||||
* @CompoundIndex(name = "compound_index", def = "{'n1': 1, 'n2': 1}")
|
||||
* class Nested {
|
||||
* String n1, n2;
|
||||
* String n1, n2;
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* resolves in the following index structures
|
||||
*
|
||||
* <pre class="code">
|
||||
* <pre>
|
||||
* <code>
|
||||
* db.root.createIndex( { hybrid.h1: 1, hybrid.h2: 1 } , { name: "hybrid.compound_index" } )
|
||||
* db.root.createIndex( { nested.n1: 1, nested.n2: 1 } , { name: "nested.compound_index" } )
|
||||
* db.hybrid.createIndex( { h1: 1, h2: 1 } , { name: "compound_index" } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @return
|
||||
@@ -165,8 +131,7 @@ public @interface CompoundIndex {
|
||||
* If {@literal true} the index will be created in the background.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
|
||||
@@ -15,24 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Container annotation that allows to collect multiple {@link CompoundIndex} annotations.
|
||||
* <p>
|
||||
* Can be used natively, declaring several nested {@link CompoundIndex} annotations. Can also be used in conjunction
|
||||
* with Java 8's support for <em>repeatable annotations</em>, where {@link CompoundIndex} can simply be declared several
|
||||
* times on the same {@linkplain ElementType#TYPE type}, implicitly generating this container annotation.
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Christoph Strobl
|
||||
* @author Jon Brisbin <jbrisbin@vmware.com>
|
||||
*/
|
||||
@Target({ ElementType.TYPE })
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface CompoundIndexes {
|
||||
|
||||
|
||||
@@ -1,216 +0,0 @@
|
||||
/*
|
||||
* Copyright 2012-2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.function.Function;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Duration format styles.
|
||||
* <p/>
|
||||
* Fork of {@code org.springframework.boot.convert.DurationStyle}.
|
||||
*
|
||||
* @author Phillip Webb
|
||||
* @since 2.2
|
||||
*/
|
||||
enum DurationStyle {
|
||||
|
||||
/**
|
||||
* Simple formatting, for example '1s'.
|
||||
*/
|
||||
SIMPLE("^([\\+\\-]?\\d+)([a-zA-Z]{0,2})$") {
|
||||
|
||||
@Override
|
||||
public Duration parse(String value, @Nullable ChronoUnit unit) {
|
||||
try {
|
||||
Matcher matcher = matcher(value);
|
||||
Assert.state(matcher.matches(), "Does not match simple duration pattern");
|
||||
String suffix = matcher.group(2);
|
||||
return (StringUtils.hasLength(suffix) ? Unit.fromSuffix(suffix) : Unit.fromChronoUnit(unit))
|
||||
.parse(matcher.group(1));
|
||||
} catch (Exception ex) {
|
||||
throw new IllegalArgumentException("'" + value + "' is not a valid simple duration", ex);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* ISO-8601 formatting.
|
||||
*/
|
||||
ISO8601("^[\\+\\-]?P.*$") {
|
||||
|
||||
@Override
|
||||
public Duration parse(String value, @Nullable ChronoUnit unit) {
|
||||
try {
|
||||
return Duration.parse(value);
|
||||
} catch (Exception ex) {
|
||||
throw new IllegalArgumentException("'" + value + "' is not a valid ISO-8601 duration", ex);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final Pattern pattern;
|
||||
|
||||
DurationStyle(String pattern) {
|
||||
this.pattern = Pattern.compile(pattern);
|
||||
}
|
||||
|
||||
protected final boolean matches(String value) {
|
||||
return this.pattern.matcher(value).matches();
|
||||
}
|
||||
|
||||
protected final Matcher matcher(String value) {
|
||||
return this.pattern.matcher(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the given value to a duration.
|
||||
*
|
||||
* @param value the value to parse
|
||||
* @return a duration
|
||||
*/
|
||||
public Duration parse(String value) {
|
||||
return parse(value, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the given value to a duration.
|
||||
*
|
||||
* @param value the value to parse
|
||||
* @param unit the duration unit to use if the value doesn't specify one ({@code null} will default to ms)
|
||||
* @return a duration
|
||||
*/
|
||||
public abstract Duration parse(String value, @Nullable ChronoUnit unit);
|
||||
|
||||
/**
|
||||
* Detect the style then parse the value to return a duration.
|
||||
*
|
||||
* @param value the value to parse
|
||||
* @return the parsed duration
|
||||
* @throws IllegalStateException if the value is not a known style or cannot be parsed
|
||||
*/
|
||||
public static Duration detectAndParse(String value) {
|
||||
return detectAndParse(value, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect the style then parse the value to return a duration.
|
||||
*
|
||||
* @param value the value to parse
|
||||
* @param unit the duration unit to use if the value doesn't specify one ({@code null} will default to ms)
|
||||
* @return the parsed duration
|
||||
* @throws IllegalStateException if the value is not a known style or cannot be parsed
|
||||
*/
|
||||
public static Duration detectAndParse(String value, @Nullable ChronoUnit unit) {
|
||||
return detect(value).parse(value, unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect the style from the given source value.
|
||||
*
|
||||
* @param value the source value
|
||||
* @return the duration style
|
||||
* @throws IllegalStateException if the value is not a known style
|
||||
*/
|
||||
public static DurationStyle detect(String value) {
|
||||
Assert.notNull(value, "Value must not be null");
|
||||
for (DurationStyle candidate : values()) {
|
||||
if (candidate.matches(value)) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("'" + value + "' is not a valid duration");
|
||||
}
|
||||
|
||||
/**
|
||||
* Units that we support.
|
||||
*/
|
||||
enum Unit {
|
||||
|
||||
/**
|
||||
* Milliseconds.
|
||||
*/
|
||||
MILLIS(ChronoUnit.MILLIS, "ms", Duration::toMillis),
|
||||
|
||||
/**
|
||||
* Seconds.
|
||||
*/
|
||||
SECONDS(ChronoUnit.SECONDS, "s", Duration::getSeconds),
|
||||
|
||||
/**
|
||||
* Minutes.
|
||||
*/
|
||||
MINUTES(ChronoUnit.MINUTES, "m", Duration::toMinutes),
|
||||
|
||||
/**
|
||||
* Hours.
|
||||
*/
|
||||
HOURS(ChronoUnit.HOURS, "h", Duration::toHours),
|
||||
|
||||
/**
|
||||
* Days.
|
||||
*/
|
||||
DAYS(ChronoUnit.DAYS, "d", Duration::toDays);
|
||||
|
||||
private final ChronoUnit chronoUnit;
|
||||
|
||||
private final String suffix;
|
||||
|
||||
private Function<Duration, Long> longValue;
|
||||
|
||||
Unit(ChronoUnit chronoUnit, String suffix, Function<Duration, Long> toUnit) {
|
||||
this.chronoUnit = chronoUnit;
|
||||
this.suffix = suffix;
|
||||
this.longValue = toUnit;
|
||||
}
|
||||
|
||||
public Duration parse(String value) {
|
||||
return Duration.of(Long.valueOf(value), this.chronoUnit);
|
||||
}
|
||||
|
||||
public long longValue(Duration value) {
|
||||
return this.longValue.apply(value);
|
||||
}
|
||||
|
||||
public static Unit fromChronoUnit(ChronoUnit chronoUnit) {
|
||||
if (chronoUnit == null) {
|
||||
return Unit.MILLIS;
|
||||
}
|
||||
for (Unit candidate : values()) {
|
||||
if (candidate.chronoUnit == chronoUnit) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unknown unit " + chronoUnit);
|
||||
}
|
||||
|
||||
public static Unit fromSuffix(String suffix) {
|
||||
for (Unit candidate : values()) {
|
||||
if (candidate.suffix.equalsIgnoreCase(suffix)) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unknown unit '" + suffix + "'");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -34,8 +34,8 @@ import java.lang.annotation.Target;
|
||||
public @interface GeoSpatialIndexed {
|
||||
|
||||
/**
|
||||
* Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
|
||||
* expression}. <br />
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
* <br />
|
||||
@@ -52,7 +52,6 @@ public @interface GeoSpatialIndexed {
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @GeoSpatialIndexed(name="index") Point h1;
|
||||
* @GeoSpatialIndexed(name="#{@myBean.indexName}") Point h2;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
@@ -68,7 +67,6 @@ public @interface GeoSpatialIndexed {
|
||||
* db.root.createIndex( { hybrid.h1: "2d" } , { name: "hybrid.index" } )
|
||||
* db.root.createIndex( { nested.n1: "2d" } , { name: "nested.index" } )
|
||||
* db.hybrid.createIndex( { h1: "2d" } , { name: "index" } )
|
||||
* db.hybrid.createIndex( { h2: "2d"} , { name: the value myBean.getIndexName() returned } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation for a property that should be used as key for a
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-hashed/">Hashed Index</a>. If used on a simple property, the
|
||||
* index uses a hashing function to compute the hash of the value of the index field. Added to a property of complex
|
||||
* type the embedded document is collapsed and the hash computed for the entire object.
|
||||
* <p />
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* public class DomainType {
|
||||
*
|
||||
* @HashIndexed @Id String id;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* {@link HashIndexed} can also be used as meta {@link java.lang.annotation.Annotation} to create composed annotations:
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Indexed
|
||||
* @HashIndexed
|
||||
* @Retention(RetentionPolicy.RUNTIME)
|
||||
* public @interface IndexAndHash {
|
||||
*
|
||||
* @AliasFor(annotation = Indexed.class, attribute = "name")
|
||||
* String name() default "";
|
||||
* }
|
||||
*
|
||||
* @Document
|
||||
* public class DomainType {
|
||||
*
|
||||
* @ComposedHashIndexed(name = "idx-name") String value;
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
* @see HashedIndex
|
||||
*/
|
||||
@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface HashIndexed {
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link IndexDefinition} implementation for MongoDB
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-hashed/">Hashed Indexes</a> maintaining entries with hashes of
|
||||
* the values of the indexed field.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
public class HashedIndex implements IndexDefinition {
|
||||
|
||||
private final String field;
|
||||
|
||||
private HashedIndex(String field) {
|
||||
|
||||
Assert.hasText(field, "Field must not be null nor empty!");
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link HashedIndex} for the given field.
|
||||
*
|
||||
* @param field must not be {@literal null} nor empty.
|
||||
* @return
|
||||
*/
|
||||
public static HashedIndex hashed(String field) {
|
||||
return new HashedIndex(field);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexKeys()
|
||||
*/
|
||||
@Override
|
||||
public Document getIndexKeys() {
|
||||
return new Document(field, "hashed");
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.index.IndexDefinition#getIndexOptions()
|
||||
*/
|
||||
@Override
|
||||
public Document getIndexOptions() {
|
||||
return new Document();
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
@@ -117,20 +116,6 @@ public class Index implements IndexDefinition {
|
||||
return expire(value, TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the TTL.
|
||||
*
|
||||
* @param timeout must not be {@literal null}.
|
||||
* @return this.
|
||||
* @throws IllegalArgumentException if given {@literal timeout} is {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Index expire(Duration timeout) {
|
||||
|
||||
Assert.notNull(timeout, "Timeout must not be null!");
|
||||
return expire(timeout.getSeconds());
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies TTL with given {@link TimeUnit}.
|
||||
*
|
||||
|
||||
@@ -26,10 +26,11 @@ import org.springframework.util.ObjectUtils;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public final class IndexField {
|
||||
|
||||
enum Type {
|
||||
GEO, TEXT, DEFAULT, HASH;
|
||||
GEO, TEXT, DEFAULT;
|
||||
}
|
||||
|
||||
private final String key;
|
||||
@@ -48,9 +49,7 @@ public final class IndexField {
|
||||
if (Type.GEO.equals(type) || Type.TEXT.equals(type)) {
|
||||
Assert.isNull(direction, "Geo/Text indexes must not have a direction!");
|
||||
} else {
|
||||
if (!Type.HASH.equals(type)) {
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
}
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
}
|
||||
|
||||
this.key = key;
|
||||
@@ -66,17 +65,6 @@ public final class IndexField {
|
||||
return new IndexField(key, order, Type.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@literal hashed} {@link IndexField} for the given key.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @return new instance of {@link IndexField}.
|
||||
* @since 2.2
|
||||
*/
|
||||
static IndexField hashed(String key) {
|
||||
return new IndexField(key, null, Type.HASH);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geo {@link IndexField} for the given key.
|
||||
*
|
||||
@@ -132,16 +120,6 @@ public final class IndexField {
|
||||
return Type.TEXT.equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is a {@literal hashed}.
|
||||
*
|
||||
* @return {@literal true} if {@link IndexField} is hashed.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isHashed() {
|
||||
return Type.HASH.equals(type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import static org.springframework.data.domain.Sort.Direction.*;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
@@ -26,14 +25,12 @@ import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Index information for a MongoDB index.
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
@@ -51,7 +48,6 @@ public class IndexInfo {
|
||||
private final boolean unique;
|
||||
private final boolean sparse;
|
||||
private final String language;
|
||||
private @Nullable Duration expireAfter;
|
||||
private @Nullable String partialFilterExpression;
|
||||
private @Nullable Document collation;
|
||||
|
||||
@@ -96,17 +92,12 @@ public class IndexInfo {
|
||||
|
||||
} else {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals("hashed", value)) {
|
||||
indexFields.add(IndexField.hashed(key));
|
||||
} else {
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
if (ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, ASC));
|
||||
} else if (MINUS_ONE.equals(keyValue)) {
|
||||
indexFields.add(IndexField.create(key, DESC));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -117,21 +108,28 @@ public class IndexInfo {
|
||||
boolean sparse = sourceDocument.containsKey("sparse") ? (Boolean) sourceDocument.get("sparse") : false;
|
||||
String language = sourceDocument.containsKey("default_language") ? (String) sourceDocument.get("default_language")
|
||||
: "";
|
||||
String partialFilter = sourceDocument.containsKey("partialFilterExpression")
|
||||
? ((Document) sourceDocument.get("partialFilterExpression")).toJson()
|
||||
: null;
|
||||
|
||||
String partialFilter = extractPartialFilterString(sourceDocument);
|
||||
|
||||
IndexInfo info = new IndexInfo(indexFields, name, unique, sparse, language);
|
||||
info.partialFilterExpression = partialFilter;
|
||||
info.collation = sourceDocument.get("collation", Document.class);
|
||||
return info;
|
||||
}
|
||||
|
||||
if (sourceDocument.containsKey("expireAfterSeconds")) {
|
||||
/**
|
||||
* @param sourceDocument
|
||||
* @return the {@link String} representation of the partial filter {@link Document}.
|
||||
* @since 2.1.11
|
||||
*/
|
||||
@Nullable
|
||||
private static String extractPartialFilterString(Document sourceDocument) {
|
||||
|
||||
Number expireAfterSeconds = sourceDocument.get("expireAfterSeconds", Number.class);
|
||||
info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class));
|
||||
if (!sourceDocument.containsKey("partialFilterExpression")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return info;
|
||||
return BsonUtils.toJson(sourceDocument.get("partialFilterExpression", Document.class));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -201,30 +199,11 @@ public class IndexInfo {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the duration after which documents within the index expire.
|
||||
*
|
||||
* @return the expiration time if set, {@link Optional#empty()} otherwise.
|
||||
* @since 2.2
|
||||
*/
|
||||
public Optional<Duration> getExpireAfter() {
|
||||
return Optional.ofNullable(expireAfter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a hashed index field is present.
|
||||
* @since 2.2
|
||||
*/
|
||||
public boolean isHashed() {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isHashed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
return "IndexInfo [indexFields=" + indexFields + ", name=" + name + ", unique=" + unique + ", sparse=" + sparse
|
||||
+ ", language=" + language + ", partialFilterExpression=" + partialFilterExpression + ", collation=" + collation
|
||||
+ ", expireAfterSeconds=" + ObjectUtils.nullSafeToString(expireAfter) + "]";
|
||||
+ "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -238,7 +217,6 @@ public class IndexInfo {
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(language);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(partialFilterExpression);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(collation);
|
||||
result += 31 * ObjectUtils.nullSafeHashCode(expireAfter);
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -280,10 +258,8 @@ public class IndexInfo {
|
||||
if (!ObjectUtils.nullSafeEquals(partialFilterExpression, other.partialFilterExpression)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(collation, other.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(expireAfter, other.expireAfter)) {
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(collation, collation)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
|
||||
@@ -15,58 +15,25 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
* @author Mark Paluch
|
||||
* @since 1.5
|
||||
*/
|
||||
public interface IndexResolver {
|
||||
interface IndexResolver {
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexResolver} given {@link MongoMappingContext}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @return the new {@link IndexResolver}.
|
||||
* @since 2.2
|
||||
*/
|
||||
static IndexResolver create(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "MongoMappingContext must not be null!");
|
||||
|
||||
return new MongoPersistentEntityIndexResolver(mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s
|
||||
* are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s are created
|
||||
* for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @param typeInformation
|
||||
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
|
||||
*/
|
||||
Iterable<? extends IndexDefinition> resolveIndexFor(TypeInformation<?> typeInformation);
|
||||
|
||||
/**
|
||||
* Find and create {@link IndexDefinition}s for properties of given {@link TypeInformation}. {@link IndexDefinition}s
|
||||
* are created for properties and types with {@link Indexed}, {@link CompoundIndexes} or {@link GeoSpatialIndexed}.
|
||||
*
|
||||
* @param entityType
|
||||
* @return Empty {@link Iterable} in case no {@link IndexDefinition} could be resolved for type.
|
||||
* @see 2.2
|
||||
*/
|
||||
default Iterable<? extends IndexDefinition> resolveIndexFor(Class<?> entityType) {
|
||||
return resolveIndexFor(ClassTypeInformation.from(entityType));
|
||||
}
|
||||
Iterable<? extends IndexDefinitionHolder> resolveIndexFor(TypeInformation<?> typeInformation);
|
||||
|
||||
}
|
||||
|
||||
@@ -30,9 +30,8 @@ import java.lang.annotation.Target;
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Jordi Llach
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
@Target({ ElementType.ANNOTATION_TYPE, ElementType.FIELD })
|
||||
@Target({ElementType.ANNOTATION_TYPE, ElementType.FIELD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface Indexed {
|
||||
|
||||
@@ -40,8 +39,7 @@ public @interface Indexed {
|
||||
* If set to true reject all documents that contain a duplicate value for the indexed field.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-unique/">https://docs.mongodb.org/manual/core/index-unique/</a>
|
||||
*/
|
||||
boolean unique() default false;
|
||||
|
||||
@@ -51,23 +49,20 @@ public @interface Indexed {
|
||||
* If set to true index will skip over any document that is missing the indexed field.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-sparse/">https://docs.mongodb.org/manual/core/index-sparse/</a>
|
||||
*/
|
||||
boolean sparse() default false;
|
||||
|
||||
/**
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping">https://docs.mongodb.org/manual/core/index-creation/#index-creation-duplicate-dropping</a>
|
||||
* @deprecated since 2.1. No longer supported by MongoDB as of server version 3.0.
|
||||
*/
|
||||
@Deprecated
|
||||
boolean dropDups() default false;
|
||||
|
||||
/**
|
||||
* Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
|
||||
* expression}. <br />
|
||||
* Index name. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity. <br />
|
||||
@@ -85,7 +80,6 @@ public @interface Indexed {
|
||||
* @Document
|
||||
* class Hybrid {
|
||||
* @Indexed(name="index") String h1;
|
||||
* @Indexed(name="#{@myBean.indexName}") String h2;
|
||||
* }
|
||||
*
|
||||
* class Nested {
|
||||
@@ -101,7 +95,6 @@ public @interface Indexed {
|
||||
* db.root.createIndex( { hybrid.h1: 1 } , { name: "hybrid.index" } )
|
||||
* db.root.createIndex( { nested.n1: 1 } , { name: "nested.index" } )
|
||||
* db.hybrid.createIndex( { h1: 1} , { name: "index" } )
|
||||
* db.hybrid.createIndex( { h2: 1} , { name: the value myBean.getIndexName() returned } )
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
@@ -122,8 +115,7 @@ public @interface Indexed {
|
||||
* If {@literal true} the index will be created in the background.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/core/indexes/#background-construction">https://docs.mongodb.org/manual/core/indexes/#background-construction</a>
|
||||
*/
|
||||
boolean background() default false;
|
||||
|
||||
@@ -131,38 +123,7 @@ public @interface Indexed {
|
||||
* Configures the number of seconds after which the collection should expire. Defaults to -1 for no expiry.
|
||||
*
|
||||
* @return
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/tutorial/expire-data/">https://docs.mongodb.org/manual/tutorial/expire-data/</a>
|
||||
* @see <a href="https://docs.mongodb.org/manual/tutorial/expire-data/">https://docs.mongodb.org/manual/tutorial/expire-data/</a>
|
||||
*/
|
||||
int expireAfterSeconds() default -1;
|
||||
|
||||
/**
|
||||
* Alternative for {@link #expireAfterSeconds()} to configure the timeout after which the document should expire.
|
||||
* Defaults to an empty {@link String} for no expiry. Accepts numeric values followed by their unit of measure:
|
||||
* <ul>
|
||||
* <li><b>d</b>: Days</li>
|
||||
* <li><b>h</b>: Hours</li>
|
||||
* <li><b>m</b>: Minutes</li>
|
||||
* <li><b>s</b>: Seconds</li>
|
||||
* <li>Alternatively: A Spring {@literal template expression}. The expression can result in a
|
||||
* {@link java.time.Duration} or a valid expiration {@link String} according to the already mentioned
|
||||
* conventions.</li>
|
||||
* </ul>
|
||||
* Supports ISO-8601 style.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* @Indexed(expireAfter = "10s") String expireAfterTenSeconds;
|
||||
*
|
||||
* @Indexed(expireAfter = "1d") String expireAfterOneDay;
|
||||
*
|
||||
* @Indexed(expireAfter = "P2D") String expireAfterTwoDays;
|
||||
*
|
||||
* @Indexed(expireAfter = "#{@mySpringBean.timeout}") String expireAfterTimeoutObtainedFromSpringBean;
|
||||
* </pre>
|
||||
*
|
||||
* @return empty by default.
|
||||
* @since 2.2
|
||||
*/
|
||||
String expireAfter() default "";
|
||||
}
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
/*
|
||||
* Copyright 2019 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentSkipListSet;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class JustOnceLogger {
|
||||
|
||||
private static final Map<String, Set<String>> KNOWN_LOGS = new ConcurrentHashMap<>();
|
||||
private static final String AUTO_INDEX_CREATION_CONFIG_CHANGE;
|
||||
|
||||
static {
|
||||
AUTO_INDEX_CREATION_CONFIG_CHANGE = "Automatic index creation will be disabled by default as of Spring Data MongoDB 3.x."
|
||||
+ System.lineSeparator()
|
||||
+ "\tPlease use 'MongoMappingContext#setAutoIndexCreation(boolean)' or override 'MongoConfigurationSupport#autoIndexCreation()' to be explicit."
|
||||
+ System.lineSeparator()
|
||||
+ "\tHowever, we recommend setting up indices manually in an application ready block. You may use index derivation there as well."
|
||||
+ System.lineSeparator() + System.lineSeparator() //
|
||||
+ "\t> -----------------------------------------------------------------------------------------"
|
||||
+ System.lineSeparator() //
|
||||
+ "\t> @EventListener(ApplicationReadyEvent.class)" + System.lineSeparator() //
|
||||
+ "\t> public void initIndicesAfterStartup() {" + System.lineSeparator() //
|
||||
+ "\t>" + System.lineSeparator() //
|
||||
+ "\t> IndexOperations indexOps = mongoTemplate.indexOps(DomainType.class);" + System.lineSeparator()//
|
||||
+ "\t>" + System.lineSeparator() //
|
||||
+ "\t> IndexResolver resolver = new MongoPersistentEntityIndexResolver(mongoMappingContext);"
|
||||
+ System.lineSeparator() //
|
||||
+ "\t> resolver.resolveIndexFor(DomainType.class).forEach(indexOps::ensureIndex);" + System.lineSeparator() //
|
||||
+ "\t> }" + System.lineSeparator() //
|
||||
+ "\t> -----------------------------------------------------------------------------------------"
|
||||
+ System.lineSeparator();
|
||||
}
|
||||
|
||||
static void logWarnIndexCreationConfigurationChange(String loggerName) {
|
||||
warnOnce(loggerName, AUTO_INDEX_CREATION_CONFIG_CHANGE);
|
||||
}
|
||||
|
||||
static void warnOnce(String loggerName, String message) {
|
||||
|
||||
Logger logger = LoggerFactory.getLogger(loggerName);
|
||||
if (!logger.isWarnEnabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!KNOWN_LOGS.containsKey(loggerName)) {
|
||||
|
||||
KNOWN_LOGS.put(loggerName, new ConcurrentSkipListSet<>(Collections.singleton(message)));
|
||||
logger.warn(message);
|
||||
} else {
|
||||
|
||||
Set<String> messages = KNOWN_LOGS.get(loggerName);
|
||||
if (messages.contains(message)) {
|
||||
return;
|
||||
}
|
||||
|
||||
messages.add(message);
|
||||
logger.warn(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -63,13 +63,11 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
/**
|
||||
* Creates a new {@link MongoPersistentEntityIndexCreator} for the given {@link MongoMappingContext} and
|
||||
* {@link MongoDbFactory}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param indexOperationsProvider must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext,
|
||||
IndexOperationsProvider indexOperationsProvider) {
|
||||
this(mappingContext, indexOperationsProvider, IndexResolver.create(mappingContext));
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider) {
|
||||
this(mappingContext, indexOperationsProvider, new MongoPersistentEntityIndexResolver(mappingContext));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -80,8 +78,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
* @param indexResolver must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext,
|
||||
IndexOperationsProvider indexOperationsProvider, IndexResolver indexResolver) {
|
||||
public MongoPersistentEntityIndexCreator(MongoMappingContext mappingContext, IndexOperationsProvider indexOperationsProvider,
|
||||
IndexResolver indexResolver) {
|
||||
|
||||
Assert.notNull(mappingContext, "MongoMappingContext must not be null!");
|
||||
Assert.notNull(indexOperationsProvider, "IndexOperationsProvider must not be null!");
|
||||
@@ -110,7 +108,6 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
|
||||
// Double check type as Spring infrastructure does not consider nested generics
|
||||
if (entity instanceof MongoPersistentEntity) {
|
||||
|
||||
checkForIndexes((MongoPersistentEntity<?>) entity);
|
||||
}
|
||||
}
|
||||
@@ -134,17 +131,7 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
private void checkForAndCreateIndexes(MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity.isAnnotationPresent(Document.class)) {
|
||||
|
||||
String collection = entity.getCollection();
|
||||
|
||||
for (IndexDefinition indexDefinition : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
|
||||
|
||||
JustOnceLogger.logWarnIndexCreationConfigurationChange(this.getClass().getName());
|
||||
|
||||
IndexDefinitionHolder indexToCreate = indexDefinition instanceof IndexDefinitionHolder
|
||||
? (IndexDefinitionHolder) indexDefinition
|
||||
: new IndexDefinitionHolder("", indexDefinition, collection);
|
||||
|
||||
for (IndexDefinitionHolder indexToCreate : indexResolver.resolveIndexFor(entity.getTypeInformation())) {
|
||||
createIndex(indexToCreate);
|
||||
}
|
||||
}
|
||||
@@ -159,8 +146,8 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
|
||||
} catch (UncategorizedMongoDbException ex) {
|
||||
|
||||
if (ex.getCause() instanceof MongoException
|
||||
&& MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) {
|
||||
if (ex.getCause() instanceof MongoException &&
|
||||
MongoDbErrorCodes.isDataIntegrityViolationCode(((MongoException) ex.getCause()).getCode())) {
|
||||
|
||||
IndexInfo existingIndex = fetchIndexInformation(indexDefinition);
|
||||
String message = "Cannot create index for '%s' in collection '%s' with keys '%s' and options '%s'.";
|
||||
|
||||
@@ -19,7 +19,6 @@ import lombok.AccessLevel;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
@@ -33,35 +32,25 @@ import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.AssociationHandler;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.CycleGuard.Path;
|
||||
import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.TextIndexIncludeOptions.IncludeStrategy;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexDefinitionBuilder;
|
||||
import org.springframework.data.mongodb.core.index.TextIndexDefinition.TextIndexedFieldSpec;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.spel.EvaluationContextProvider;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.expression.Expression;
|
||||
import org.springframework.expression.ParserContext;
|
||||
import org.springframework.expression.common.LiteralExpression;
|
||||
import org.springframework.expression.spel.standard.SpelExpressionParser;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -79,18 +68,15 @@ import org.springframework.util.StringUtils;
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
|
||||
private static final SpelExpressionParser PARSER = new SpelExpressionParser();
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private EvaluationContextProvider evaluationContextProvider = EvaluationContextProvider.DEFAULT;
|
||||
private final MongoMappingContext mappingContext;
|
||||
|
||||
/**
|
||||
* Create new {@link MongoPersistentEntityIndexResolver}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
*/
|
||||
public MongoPersistentEntityIndexResolver(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
public MongoPersistentEntityIndexResolver(MongoMappingContext mappingContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "Mapping context must not be null in order to resolve index definitions");
|
||||
this.mappingContext = mappingContext;
|
||||
@@ -105,20 +91,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the {@link IndexDefinition}s for a given {@literal root} entity by traversing
|
||||
* {@link MongoPersistentProperty} scanning for index annotations {@link Indexed}, {@link CompoundIndex} and
|
||||
* {@link GeospatialIndex}. The given {@literal root} has therefore to be annotated with {@link Document}.
|
||||
* Resolve the {@link IndexDefinition}s for given {@literal root} entity by traversing {@link MongoPersistentProperty}
|
||||
* scanning for index annotations {@link Indexed}, {@link CompoundIndex} and {@link GeospatialIndex}. The given
|
||||
* {@literal root} has therefore to be annotated with {@link Document}.
|
||||
*
|
||||
* @param root must not be null.
|
||||
* @return List of {@link IndexDefinitionHolder}. Will never be {@code null}.
|
||||
* @throws IllegalArgumentException in case of missing {@link Document} annotation marking root entities.
|
||||
*/
|
||||
public List<IndexDefinitionHolder> resolveIndexForEntity(MongoPersistentEntity<?> root) {
|
||||
public List<IndexDefinitionHolder> resolveIndexForEntity(final MongoPersistentEntity<?> root) {
|
||||
|
||||
Assert.notNull(root, "MongoPersistentEntity must not be null!");
|
||||
Assert.notNull(root, "Index cannot be resolved for given 'null' entity.");
|
||||
Document document = root.findAnnotation(Document.class);
|
||||
Assert.notNull(document, () -> String
|
||||
.format("Entity %s is not a collection root. Make sure to annotate it with @Document!", root.getName()));
|
||||
Assert.notNull(document, "Given entity is not collection root.");
|
||||
|
||||
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
String collection = root.getCollection();
|
||||
@@ -137,15 +122,17 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
List<IndexDefinitionHolder> indexes, CycleGuard guard) {
|
||||
|
||||
try {
|
||||
String collection = root.getCollection();
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
persistentProperty.getFieldName(), Path.of(persistentProperty), root.getCollection(), guard));
|
||||
persistentProperty.getFieldName(), Path.of(persistentProperty), collection, guard));
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(
|
||||
persistentProperty.getFieldName(), root.getCollection(), persistentProperty);
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(
|
||||
persistentProperty.getFieldName(), collection, persistentProperty);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexes.add(indexDefinitionHolder);
|
||||
}
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
@@ -172,14 +159,14 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity));
|
||||
|
||||
entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
|
||||
.guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));
|
||||
.guradAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));
|
||||
|
||||
indexInformation.addAll(resolveIndexesForDbrefs(dotPath, collection, entity));
|
||||
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath,
|
||||
private void guradAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath,
|
||||
Path path, String collection, List<IndexDefinitionHolder> indexes, CycleGuard guard) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." : "") + persistentProperty.getFieldName();
|
||||
@@ -196,30 +183,25 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
persistentProperty);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexes.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> createIndexDefinitionHolderForProperty(String dotPath, String collection,
|
||||
@Nullable
|
||||
private IndexDefinitionHolder createIndexDefinitionHolderForProperty(String dotPath, String collection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
List<IndexDefinitionHolder> indices = new ArrayList<>(2);
|
||||
|
||||
if (persistentProperty.isAnnotationPresent(Indexed.class)) {
|
||||
indices.add(createIndexDefinition(dotPath, collection, persistentProperty));
|
||||
return createIndexDefinition(dotPath, collection, persistentProperty);
|
||||
} else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
indices.add(createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty));
|
||||
return createGeoSpatialIndexDefinition(dotPath, collection, persistentProperty);
|
||||
}
|
||||
|
||||
if (persistentProperty.isAnnotationPresent(HashIndexed.class)) {
|
||||
indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty));
|
||||
}
|
||||
|
||||
return indices;
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> potentiallyCreateCompoundIndexDefinitions(String dotPath, String collection,
|
||||
@@ -263,10 +245,6 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
|
||||
if (root.hasCollation()) {
|
||||
indexDefinitionBuilder.withSimpleCollation();
|
||||
}
|
||||
|
||||
TextIndexDefinition indexDefinition = indexDefinitionBuilder.build();
|
||||
|
||||
if (!indexDefinition.hasFieldSpec()) {
|
||||
@@ -334,12 +312,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of a given
|
||||
* type.
|
||||
* Create {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} for {@link CompoundIndexes} of given type.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param fallbackCollection
|
||||
* @param entity
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
protected List<IndexDefinitionHolder> createCompoundIndexDefinitions(String dotPath, String fallbackCollection,
|
||||
@@ -363,14 +340,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return indexDefinitions;
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, String collection, CompoundIndex index,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
CompoundIndexDefinition indexDefinition = new CompoundIndexDefinition(
|
||||
resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def(), entity));
|
||||
resolveCompoundIndexKeyFromStringDefinition(dotPath, index.def()));
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null));
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, null));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
@@ -388,8 +366,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString,
|
||||
PersistentEntity<?, ?> entity) {
|
||||
private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString) {
|
||||
|
||||
if (!StringUtils.hasText(dotPath) && !StringUtils.hasText(keyDefinitionString)) {
|
||||
throw new InvalidDataAccessApiUsageException("Cannot create index on root level for empty keys.");
|
||||
@@ -399,11 +376,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new org.bson.Document(dotPath, 1);
|
||||
}
|
||||
|
||||
Object keyDefToUse = evaluate(keyDefinitionString, getEvaluationContextForProperty(entity));
|
||||
|
||||
org.bson.Document dbo = (keyDefToUse instanceof org.bson.Document) ? (org.bson.Document) keyDefToUse
|
||||
: org.bson.Document.parse(ObjectUtils.nullSafeToString(keyDefToUse));
|
||||
|
||||
org.bson.Document dbo = org.bson.Document.parse(keyDefinitionString);
|
||||
if (!StringUtils.hasText(dotPath)) {
|
||||
return dbo;
|
||||
}
|
||||
@@ -417,19 +390,19 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for a given
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link Indexed} for given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persistentProperty
|
||||
* @param persitentProperty
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
protected IndexDefinitionHolder createIndexDefinition(String dotPath, String collection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
MongoPersistentProperty persitentProperty) {
|
||||
|
||||
Indexed index = persistentProperty.findAnnotation(Indexed.class);
|
||||
Indexed index = persitentProperty.findAnnotation(Indexed.class);
|
||||
|
||||
if (index == null) {
|
||||
return null;
|
||||
@@ -439,8 +412,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
IndexDirection.ASCENDING.equals(index.direction()) ? Sort.Direction.ASC : Sort.Direction.DESC);
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition
|
||||
.named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty));
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persitentProperty));
|
||||
}
|
||||
|
||||
if (index.unique()) {
|
||||
@@ -459,89 +431,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
indexDefinition.expire(index.expireAfterSeconds(), TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(index.expireAfter())) {
|
||||
|
||||
if (index.expireAfterSeconds() >= 0) {
|
||||
throw new IllegalStateException(String.format(
|
||||
"@Indexed already defines an expiration timeout of %s seconds via Indexed#expireAfterSeconds. Please make to use either expireAfterSeconds or expireAfter.",
|
||||
index.expireAfterSeconds()));
|
||||
}
|
||||
|
||||
Duration timeout = computeIndexTimeout(index.expireAfter(),
|
||||
getEvaluationContextForProperty(persistentProperty.getOwner()));
|
||||
if (!timeout.isZero() && !timeout.isNegative()) {
|
||||
indexDefinition.expire(timeout);
|
||||
}
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given
|
||||
* {@link MongoPersistentProperty}.
|
||||
*
|
||||
* @param dotPath The properties {@literal "dot"} path representation from its document root.
|
||||
* @param collection
|
||||
* @param persistentProperty
|
||||
* @return
|
||||
* @since 2.2
|
||||
*/
|
||||
@Nullable
|
||||
protected IndexDefinitionHolder createHashedIndexDefinition(String dotPath, String collection,
|
||||
MongoPersistentProperty persistentProperty) {
|
||||
|
||||
HashIndexed index = persistentProperty.findAnnotation(HashIndexed.class);
|
||||
|
||||
if (index == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, HashedIndex.hashed(dotPath), collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the default {@link EvaluationContext}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
protected EvaluationContext getEvaluationContext() {
|
||||
return evaluationContextProvider.getEvaluationContext(null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link EvaluationContext} for a given {@link PersistentEntity entity} the default one.
|
||||
*
|
||||
* @param persistentEntity can be {@literal null}
|
||||
* @return
|
||||
*/
|
||||
private EvaluationContext getEvaluationContextForProperty(@Nullable PersistentEntity<?, ?> persistentEntity) {
|
||||
|
||||
if (persistentEntity == null || !(persistentEntity instanceof BasicMongoPersistentEntity)) {
|
||||
return getEvaluationContext();
|
||||
}
|
||||
|
||||
EvaluationContext contextFromEntity = ((BasicMongoPersistentEntity<?>) persistentEntity).getEvaluationContext(null);
|
||||
|
||||
if (contextFromEntity != null && !EvaluationContextProvider.DEFAULT.equals(contextFromEntity)) {
|
||||
return contextFromEntity;
|
||||
}
|
||||
|
||||
return getEvaluationContext();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EvaluationContextProvider} used for obtaining the {@link EvaluationContext} used to compute
|
||||
* {@link org.springframework.expression.spel.standard.SpelExpression expressions}.
|
||||
*
|
||||
* @param evaluationContextProvider must not be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public void setEvaluationContextProvider(EvaluationContextProvider evaluationContextProvider) {
|
||||
this.evaluationContextProvider = evaluationContextProvider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link IndexDefinition} wrapped in {@link IndexDefinitionHolder} out of {@link GeoSpatialIndexed} for
|
||||
* {@link MongoPersistentProperty}.
|
||||
@@ -566,8 +458,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
indexDefinition.withMin(index.min()).withMax(index.max());
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition
|
||||
.named(pathAwareIndexName(index.name(), dotPath, persistentProperty.getOwner(), persistentProperty));
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, persistentProperty));
|
||||
}
|
||||
|
||||
indexDefinition.typed(index.type()).withBucketSize(index.bucketSize()).withAdditionalField(index.additionalField());
|
||||
@@ -575,18 +466,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private String pathAwareIndexName(String indexName, String dotPath, @Nullable PersistentEntity<?, ?> entity,
|
||||
@Nullable MongoPersistentProperty property) {
|
||||
private String pathAwareIndexName(String indexName, String dotPath, @Nullable MongoPersistentProperty property) {
|
||||
|
||||
String nameToUse = "";
|
||||
if (StringUtils.hasText(indexName)) {
|
||||
|
||||
Object result = evaluate(indexName, getEvaluationContextForProperty(entity));
|
||||
|
||||
if (result != null) {
|
||||
nameToUse = ObjectUtils.nullSafeToString(result);
|
||||
}
|
||||
}
|
||||
String nameToUse = StringUtils.hasText(indexName) ? indexName : "";
|
||||
|
||||
if (!StringUtils.hasText(dotPath) || (property != null && dotPath.equals(property.getFieldName()))) {
|
||||
return StringUtils.hasText(nameToUse) ? nameToUse : dotPath;
|
||||
@@ -624,56 +506,14 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
propertyDotPath));
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
IndexDefinitionHolder indexDefinitionHolder = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
property);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
if (indexDefinitionHolder != null) {
|
||||
indexes.add(indexDefinitionHolder);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the index timeout value by evaluating a potential
|
||||
* {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value.
|
||||
*
|
||||
* @param timeoutValue must not be {@literal null}.
|
||||
* @param evaluationContext must not be {@literal null}.
|
||||
* @return never {@literal null}
|
||||
* @since 2.2
|
||||
* @throws IllegalArgumentException for invalid duration values.
|
||||
*/
|
||||
private static Duration computeIndexTimeout(String timeoutValue, EvaluationContext evaluationContext) {
|
||||
|
||||
Object evaluatedTimeout = evaluate(timeoutValue, evaluationContext);
|
||||
|
||||
if (evaluatedTimeout == null) {
|
||||
return Duration.ZERO;
|
||||
}
|
||||
|
||||
if (evaluatedTimeout instanceof Duration) {
|
||||
return (Duration) evaluatedTimeout;
|
||||
}
|
||||
|
||||
String val = evaluatedTimeout.toString();
|
||||
|
||||
if (val == null) {
|
||||
return Duration.ZERO;
|
||||
}
|
||||
|
||||
return DurationStyle.detectAndParse(val);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private static Object evaluate(String value, EvaluationContext evaluationContext) {
|
||||
|
||||
Expression expression = PARSER.parseExpression(value, ParserContext.TEMPLATE_EXPRESSION);
|
||||
if (expression instanceof LiteralExpression) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return expression.getValue(evaluationContext, Object.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user