Compare commits
159 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
87ab1ac48c | ||
|
|
454afd9877 | ||
|
|
45971b212c | ||
|
|
68370c16fb | ||
|
|
d2c9b47366 | ||
|
|
4d7ee0e741 | ||
|
|
e7f3a2436d | ||
|
|
4ef1ff6aff | ||
|
|
b6ad32d7d4 | ||
|
|
e875f9ea33 | ||
|
|
9db9d16cf8 | ||
|
|
f00991dc29 | ||
|
|
bacbd7133e | ||
|
|
f38f6d67ab | ||
|
|
3f27e8e152 | ||
|
|
23177fef0c | ||
|
|
f3b90c2b8a | ||
|
|
d57c5a9529 | ||
|
|
986ea39f90 | ||
|
|
5bd7ff1413 | ||
|
|
93b9f23b07 | ||
|
|
42ab7d2f63 | ||
|
|
a6a2f0bde9 | ||
|
|
7d0b070d1f | ||
|
|
81bc3c599b | ||
|
|
403f0019d5 | ||
|
|
4f65bb0810 | ||
|
|
ef29e69a87 | ||
|
|
5cffb3c07c | ||
|
|
61d3a0bd1f | ||
|
|
82d67c1dbb | ||
|
|
85a30ec915 | ||
|
|
c70c29b2c7 | ||
|
|
2a5ae0da37 | ||
|
|
826015e9c1 | ||
|
|
9dda0a2f93 | ||
|
|
7dfe460433 | ||
|
|
73a0f04933 | ||
|
|
a1c165921d | ||
|
|
3872b379cd | ||
|
|
98fe043b95 | ||
|
|
c217618d9d | ||
|
|
b1020d19ba | ||
|
|
a481636429 | ||
|
|
efa9a2d408 | ||
|
|
149a703ecc | ||
|
|
2b715c54d3 | ||
|
|
ece261aadb | ||
|
|
dae0ac3b4d | ||
|
|
5ab75eb65a | ||
|
|
e96ef8e18f | ||
|
|
82af678cab | ||
|
|
6ed274bd9b | ||
|
|
48ac7e75ba | ||
|
|
a51c96298f | ||
|
|
f1354c4508 | ||
|
|
ff7588f648 | ||
|
|
124036fe36 | ||
|
|
80c5b536df | ||
|
|
2ee33b1444 | ||
|
|
eec6cea507 | ||
|
|
90d03d92d8 | ||
|
|
9a48e32565 | ||
|
|
ede6927b65 | ||
|
|
2edc29f758 | ||
|
|
5bd9bcca75 | ||
|
|
54f75e653b | ||
|
|
7b33f56e33 | ||
|
|
829eed7d6c | ||
|
|
5d3a3e1fe2 | ||
|
|
e71ec4fc41 | ||
|
|
f065e295e9 | ||
|
|
d23e1b3247 | ||
|
|
a1e8e6f3bc | ||
|
|
e4030197e8 | ||
|
|
2885c35511 | ||
|
|
a2a33390b6 | ||
|
|
bf606bcc47 | ||
|
|
e91809957d | ||
|
|
313a7b86e8 | ||
|
|
9d7473487f | ||
|
|
2734a7d8d4 | ||
|
|
0f85808531 | ||
|
|
7c0afda0a6 | ||
|
|
dbe17249b7 | ||
|
|
cfe0baadd1 | ||
|
|
8d0143ad76 | ||
|
|
43fee26950 | ||
|
|
70145a4c86 | ||
|
|
f7a90e93c5 | ||
|
|
3e78ce212a | ||
|
|
5a87dec2d5 | ||
|
|
f4556406bd | ||
|
|
af6d1eff0c | ||
|
|
191993caef | ||
|
|
dbed948c73 | ||
|
|
8f232e4983 | ||
|
|
e961d3c995 | ||
|
|
699d5f40f5 | ||
|
|
4b58ecc041 | ||
|
|
4b8fb812fa | ||
|
|
78137c882d | ||
|
|
8033b05cb7 | ||
|
|
a1a0675976 | ||
|
|
a88748d798 | ||
|
|
c2fc09e324 | ||
|
|
be8e70225a | ||
|
|
92079ca200 | ||
|
|
52b13ccf58 | ||
|
|
d73c2e3602 | ||
|
|
39e301d98d | ||
|
|
a7d865eb5e | ||
|
|
cde39008cf | ||
|
|
ffaa7cae6f | ||
|
|
2ef9844219 | ||
|
|
6f13837890 | ||
|
|
1ca2f5c3f1 | ||
|
|
a5e209379c | ||
|
|
bcb5628840 | ||
|
|
0b27635d67 | ||
|
|
198ebaa7d8 | ||
|
|
bebb1b6ce4 | ||
|
|
f59a38575a | ||
|
|
da70b34f13 | ||
|
|
f4f24ec1a7 | ||
|
|
f1365c5c55 | ||
|
|
71364255ca | ||
|
|
22952c3ef0 | ||
|
|
27b3b604c9 | ||
|
|
bce280d02b | ||
|
|
4b6ab894bf | ||
|
|
38c1d7fc37 | ||
|
|
43bef87966 | ||
|
|
c2498d41a1 | ||
|
|
c4182262ce | ||
|
|
3149c6e35b | ||
|
|
6d662461b8 | ||
|
|
597d6825f7 | ||
|
|
8b060455c2 | ||
|
|
2bf60ac827 | ||
|
|
f3d6f405c9 | ||
|
|
bd985a6589 | ||
|
|
c1417c4e4b | ||
|
|
36515abad4 | ||
|
|
cba9088b5e | ||
|
|
a33aece85d | ||
|
|
076d334b3c | ||
|
|
e644692a8a | ||
|
|
1d547ec150 | ||
|
|
ce3066dc59 | ||
|
|
c11dcd19ee | ||
|
|
1d60cd7e98 | ||
|
|
4700b4dda2 | ||
|
|
91f1dc1c6a | ||
|
|
70d87a9f71 | ||
|
|
427b468891 | ||
|
|
30ed3350c7 | ||
|
|
78e04f0b42 | ||
|
|
f311bdfbb4 |
2
.github/workflows/project.yml
vendored
2
.github/workflows/project.yml
vendored
@@ -36,7 +36,7 @@ jobs:
|
||||
token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
|
||||
Feedback-Provided:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'spring-projects' && github.event.action == 'created' && contains(join(github.event.issue.labels.*.name, ', '), 'waiting-for-feedback')
|
||||
if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback')
|
||||
steps:
|
||||
- name: Update Project Card
|
||||
uses: peter-evans/create-or-update-project-card@v1.1.2
|
||||
|
||||
2
CI.adoc
2
CI.adoc
@@ -1,6 +1,6 @@
|
||||
= Continuous Integration
|
||||
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
= Spring Data contribution guidelines
|
||||
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
|
||||
|
||||
60
Jenkinsfile
vendored
60
Jenkinsfile
vendored
@@ -3,7 +3,7 @@ pipeline {
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS)
|
||||
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
@@ -23,39 +23,39 @@ pipeline {
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0.23", "ci/openjdk8-mongodb-4.0/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.2') {
|
||||
stage('Publish JDK 8 + MongoDB 4.4') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.2/**"
|
||||
changeset "ci/openjdk8-mongodb-4.4/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.4.4", "ci/openjdk8-mongodb-4.4/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 15 + MongoDB 4.2') {
|
||||
stage('Publish JDK 16 + MongoDB 4.4') {
|
||||
when {
|
||||
changeset "ci/openjdk15-mongodb-4.2/**"
|
||||
changeset "ci/openjdk16-mongodb-4.4/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk15-with-mongodb-4.2.0", "ci/openjdk15-mongodb-4.2/")
|
||||
def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
@@ -68,7 +68,7 @@ pipeline {
|
||||
stage("test: baseline (jdk8)") {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
branch 'main'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -76,16 +76,19 @@ pipeline {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -95,7 +98,7 @@ pipeline {
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
allOf {
|
||||
branch 'master'
|
||||
branch 'main'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -105,58 +108,67 @@ pipeline {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 4.2 (jdk8)") {
|
||||
stage("test: mongodb 4.4 (jdk8)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk15)") {
|
||||
stage("test: baseline (jdk16)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk15-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -168,7 +180,7 @@ pipeline {
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
anyOf {
|
||||
branch 'master'
|
||||
branch 'main'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -185,7 +197,7 @@ pipeline {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' +
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
@@ -201,7 +213,7 @@ pipeline {
|
||||
|
||||
stage('Publish documentation') {
|
||||
when {
|
||||
branch 'master'
|
||||
branch 'main'
|
||||
}
|
||||
agent {
|
||||
label 'data'
|
||||
@@ -216,7 +228,7 @@ pipeline {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' +
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
|
||||
202
LICENSE.txt
Normal file
202
LICENSE.txt
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
https://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
https://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
@@ -1,6 +1,6 @@
|
||||
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
|
||||
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
|
||||
The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
|
||||
|
||||
|
||||
@@ -5,11 +5,11 @@ ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,15 +1,15 @@
|
||||
FROM adoptopenjdk/openjdk15:latest
|
||||
FROM adoptopenjdk/openjdk16:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -10,6 +10,6 @@ RUN RUN set -eux; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
|
||||
apt-get install -y mongodb-org=4.0.23 mongodb-org-server=4.0.23 mongodb-org-shell=4.0.23 mongodb-org-mongos=4.0.23 mongodb-org-tools=4.0.23 ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
|
||||
@@ -5,11 +5,13 @@ ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update ; \
|
||||
apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
|
||||
ln -T /bin/true /usr/bin/systemctl ; \
|
||||
apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
|
||||
rm /usr/bin/systemctl ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
13
pom.xml
13
pom.xml
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.0-M2</version>
|
||||
<version>3.3.0-M2</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,7 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.5.0-M2</version>
|
||||
<version>2.6.0-M2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -26,8 +26,8 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.5.0-M2</springdata.commons>
|
||||
<mongo>4.1.1</mongo>
|
||||
<springdata.commons>2.6.0-M2</springdata.commons>
|
||||
<mongo>4.3.0</mongo>
|
||||
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
@@ -158,11 +158,6 @@
|
||||
<id>spring-libs-milestone</id>
|
||||
<url>https://repo.spring.io/libs-milestone</url>
|
||||
</pluginRepository>
|
||||
<pluginRepository>
|
||||
<id>bintray-plugins</id>
|
||||
<name>bintray-plugins</name>
|
||||
<url>https://jcenter.bintray.com</url>
|
||||
</pluginRepository>
|
||||
</pluginRepositories>
|
||||
|
||||
</project>
|
||||
|
||||
29
settings.xml
Normal file
29
settings.xml
Normal file
@@ -0,0 +1,29 @@
|
||||
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||
https://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||
|
||||
<servers>
|
||||
<server>
|
||||
<id>spring-plugins-release</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-milestone</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
<server>
|
||||
<id>spring-libs-release</id>
|
||||
<username>${env.ARTIFACTORY_USR}</username>
|
||||
<password>${env.ARTIFACTORY_PSW}</password>
|
||||
</server>
|
||||
</servers>
|
||||
|
||||
</settings>
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.0-M2</version>
|
||||
<version>3.3.0-M2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.0-M2</version>
|
||||
<version>3.3.0-M2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.0-M2</version>
|
||||
<version>3.3.0-M2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -87,6 +87,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
|
||||
@@ -0,0 +1,152 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.DocumentCodec;
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json})
|
||||
* expression. The expression will be wrapped within <code>{ ... }</code> if necessary. The actual parsing and parameter
|
||||
* binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via
|
||||
* {@link #toDocument()}.
|
||||
* <p />
|
||||
*
|
||||
* <pre class="code">
|
||||
* $toUpper : $name -> { '$toUpper' : '$name' }
|
||||
*
|
||||
* { '$toUpper' : '$name' } -> { '$toUpper' : '$name' }
|
||||
*
|
||||
* { '$toUpper' : '?0' }, "$name" -> { '$toUpper' : '$name' }
|
||||
* </pre>
|
||||
*
|
||||
* Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry}
|
||||
* containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
public class BindableMongoExpression implements MongoExpression {
|
||||
|
||||
private final String expressionString;
|
||||
|
||||
private final @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private final @Nullable Object[] args;
|
||||
|
||||
private final Lazy<Document> target;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link BindableMongoExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param args can be {@literal null}.
|
||||
*/
|
||||
public BindableMongoExpression(String expression, @Nullable Object[] args) {
|
||||
this(expression, null, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link BindableMongoExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param codecRegistryProvider can be {@literal null}.
|
||||
* @param args can be {@literal null}.
|
||||
*/
|
||||
public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
|
||||
@Nullable Object[] args) {
|
||||
|
||||
this.expressionString = expression;
|
||||
this.codecRegistryProvider = codecRegistryProvider;
|
||||
this.args = args;
|
||||
this.target = Lazy.of(this::parse);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the {@link CodecRegistry} used to convert expressions.
|
||||
*
|
||||
* @param codecRegistry must not be {@literal null}.
|
||||
* @return new instance of {@link BindableMongoExpression}.
|
||||
*/
|
||||
public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) {
|
||||
return new BindableMongoExpression(expressionString, () -> codecRegistry, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provide the arguments to bind to the placeholders via their index.
|
||||
*
|
||||
* @param args must not be {@literal null}.
|
||||
* @return new instance of {@link BindableMongoExpression}.
|
||||
*/
|
||||
public BindableMongoExpression bind(Object... args) {
|
||||
return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.MongoExpression#toDocument()
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument() {
|
||||
return target.get();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
|
||||
+ Arrays.toString(args) + '}';
|
||||
}
|
||||
|
||||
private Document parse() {
|
||||
|
||||
String expression = wrapJsonIfNecessary(expressionString);
|
||||
|
||||
if (ObjectUtils.isEmpty(args)) {
|
||||
|
||||
if (codecRegistryProvider == null) {
|
||||
return Document.parse(expression);
|
||||
}
|
||||
|
||||
return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class)
|
||||
.orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
|
||||
}
|
||||
|
||||
ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec()
|
||||
: new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry());
|
||||
return codec.decode(expression, args);
|
||||
}
|
||||
|
||||
private static String wrapJsonIfNecessary(String json) {
|
||||
|
||||
if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
|
||||
return json;
|
||||
}
|
||||
|
||||
return "{" + json + "}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
/**
|
||||
* Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
|
||||
* passed on to the driver.
|
||||
* <p />
|
||||
* A set of predefined {@link MongoExpression expressions}, including a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method
|
||||
* like expressions (eg. {@code toUpper(name)}) are available via the
|
||||
* {@link org.springframework.data.mongodb.core.aggregation Aggregation API}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ArrayOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ConvertOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.DateOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ObjectOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.SetOperators
|
||||
* @see org.springframework.data.mongodb.core.aggregation.StringOperators
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface MongoExpression {
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}). <br />
|
||||
* The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
|
||||
* if necessary.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link MongoExpression}.
|
||||
*/
|
||||
static MongoExpression create(String expression) {
|
||||
return new BindableMongoExpression(expression, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0})
|
||||
* that will be resolved on first call of {@link #toDocument()}. <br />
|
||||
* The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
|
||||
* if necessary.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link MongoExpression}.
|
||||
*/
|
||||
static MongoExpression create(String expression, Object... args) {
|
||||
return new BindableMongoExpression(expression, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the native {@link org.bson.Document} representation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
org.bson.Document toDocument();
|
||||
}
|
||||
@@ -26,7 +26,6 @@ import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.type.filter.AnnotationTypeFilter;
|
||||
import org.springframework.data.annotation.Persistent;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
@@ -140,8 +139,7 @@ public abstract class MongoConfigurationSupport {
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
|
||||
* {@link Persistent}.
|
||||
* Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
|
||||
*
|
||||
* @param basePackage must not be {@literal null}.
|
||||
* @return
|
||||
@@ -161,7 +159,6 @@ public abstract class MongoConfigurationSupport {
|
||||
ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
|
||||
false);
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
|
||||
componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));
|
||||
|
||||
for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping;
|
||||
import org.springframework.data.mongodb.core.aggregation.CountOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
@@ -36,6 +37,7 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -52,41 +54,46 @@ class AggregationUtil {
|
||||
|
||||
QueryMapper queryMapper;
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
Lazy<AggregationOperationContext> untypedMappingContext;
|
||||
|
||||
AggregationUtil(QueryMapper queryMapper,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.mappingContext = mappingContext;
|
||||
this.untypedMappingContext = Lazy
|
||||
.of(() -> new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare the {@link AggregationOperationContext} for a given aggregation by either returning the context itself it
|
||||
* is not {@literal null}, create a {@link TypeBasedAggregationOperationContext} if the aggregation contains type
|
||||
* information (is a {@link TypedAggregation}) or use the {@link Aggregation#DEFAULT_CONTEXT}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param context can be {@literal null}.
|
||||
* @return the root {@link AggregationOperationContext} to use.
|
||||
*/
|
||||
AggregationOperationContext prepareAggregationContext(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext context) {
|
||||
AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
|
||||
if (context != null) {
|
||||
return context;
|
||||
DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping();
|
||||
|
||||
if (domainTypeMapping == DomainTypeMapping.NONE) {
|
||||
return Aggregation.DEFAULT_CONTEXT;
|
||||
}
|
||||
|
||||
if (!(aggregation instanceof TypedAggregation)) {
|
||||
return new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
Class<?> inputType = ((TypedAggregation) aggregation).getInputType();
|
||||
if(inputType == null) {
|
||||
return untypedMappingContext.get();
|
||||
}
|
||||
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
if (aggregation.getPipeline().containsUnionWith()) {
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -125,53 +132,6 @@ class AggregationUtil {
|
||||
return command;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param entityClass can be {@literal null} if the {@link Query} object is empty.
|
||||
* @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation.
|
||||
*/
|
||||
Aggregation createCountAggregation(Query query, @Nullable Class<?> entityClass) {
|
||||
|
||||
List<AggregationOperation> pipeline = computeCountAggregationPipeline(query, entityClass);
|
||||
|
||||
Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline)
|
||||
: Aggregation.newAggregation(pipeline);
|
||||
aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build());
|
||||
|
||||
return aggregation;
|
||||
}
|
||||
|
||||
private List<AggregationOperation> computeCountAggregationPipeline(Query query, @Nullable Class<?> entityType) {
|
||||
|
||||
CountOperation count = Aggregation.count().as("totalEntityCount");
|
||||
if (query.getQueryObject().isEmpty()) {
|
||||
return Collections.singletonList(count);
|
||||
}
|
||||
|
||||
Assert.notNull(entityType, "Entity type must not be null!");
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(),
|
||||
mappingContext.getPersistentEntity(entityType));
|
||||
|
||||
CriteriaDefinition criteria = new CriteriaDefinition() {
|
||||
|
||||
@Override
|
||||
public Document getCriteriaObject() {
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getKey() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
return Arrays.asList(Aggregation.match(criteria), count);
|
||||
}
|
||||
|
||||
private List<Document> mapAggregationPipeline(List<Document> pipeline) {
|
||||
|
||||
return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty()))
|
||||
|
||||
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -42,6 +45,7 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -54,17 +58,19 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,7 +84,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -88,7 +94,21 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,7 +119,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -110,7 +130,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,7 +141,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,7 +152,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -252,7 +272,20 @@ public class CollectionOptions {
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -303,6 +336,16 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -398,4 +441,89 @@ public class CollectionOptions {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one,
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,7 +47,7 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
|
||||
* @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper);
|
||||
public IndexOperations indexOps(String collectionName, Class<?> type) {
|
||||
return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
@@ -28,18 +29,23 @@ import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.TimeSeries;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
@@ -107,6 +113,20 @@ class EntityOperations {
|
||||
return AdaptibleMappedEntity.of(entity, context, conversionService);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param source can be {@literal null}.
|
||||
* @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}.
|
||||
* @since 3.2
|
||||
*/
|
||||
static boolean isCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (source == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param entityClass should not be null.
|
||||
* @return the {@link MongoPersistentEntity#getCollection() collection name}.
|
||||
@@ -762,6 +782,24 @@ class EntityOperations {
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
|
||||
/**
|
||||
* Derive the applicable {@link CollectionOptions} for the given type.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
CollectionOptions getCollectionOptions();
|
||||
|
||||
/**
|
||||
* Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially
|
||||
* annotated field names.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -801,6 +839,16 @@ class EntityOperations {
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
return CollectionOptions.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) {
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -838,6 +886,58 @@ class EntityOperations {
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
|
||||
CollectionOptions collectionOptions = CollectionOptions.empty();
|
||||
if (entity.hasCollation()) {
|
||||
collectionOptions = collectionOptions.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
if (entity.isAnnotationPresent(TimeSeries.class)) {
|
||||
|
||||
TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class);
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.timeField()) == null) {
|
||||
throw new MappingException(String.format("Time series field '%s' does not exist in type %s",
|
||||
timeSeries.timeField(), entity.getName()));
|
||||
}
|
||||
|
||||
TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField());
|
||||
if (StringUtils.hasText(timeSeries.metaField())) {
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.metaField()) == null) {
|
||||
throw new MappingException(
|
||||
String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName()));
|
||||
}
|
||||
|
||||
options = options.metaField(timeSeries.metaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(timeSeries.granularity())) {
|
||||
options = options.granularity(timeSeries.granularity());
|
||||
}
|
||||
collectionOptions = collectionOptions.timeSeries(options);
|
||||
}
|
||||
|
||||
return collectionOptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) {
|
||||
|
||||
TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField()));
|
||||
|
||||
if (StringUtils.hasText(source.getMetaField())) {
|
||||
target = target.metaField(mappedNameOrDefault(source.getMetaField()));
|
||||
}
|
||||
return target.granularity(source.getGranularity());
|
||||
}
|
||||
|
||||
private String mappedNameOrDefault(String name) {
|
||||
MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
|
||||
return persistentProperty != null ? persistentProperty.getFieldName() : name;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -125,6 +125,11 @@ public interface ExecutableFindOperation {
|
||||
|
||||
/**
|
||||
* Get the number of matching elements.
|
||||
* <p />
|
||||
* This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
|
||||
* execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard,
|
||||
* session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link MongoOperations#estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @return total number of matching elements.
|
||||
*/
|
||||
|
||||
@@ -115,6 +115,10 @@ abstract class IndexConverters {
|
||||
ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class)));
|
||||
}
|
||||
|
||||
if (indexOptions.containsKey("wildcardProjection")) {
|
||||
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
|
||||
}
|
||||
|
||||
return ops;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bson.BsonInvalidOperationException;
|
||||
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.DataAccessResourceFailureException;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
@@ -39,6 +40,7 @@ import org.springframework.util.ClassUtils;
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.MongoException;
|
||||
import com.mongodb.MongoServerException;
|
||||
import com.mongodb.MongoSocketException;
|
||||
import com.mongodb.bulk.BulkWriteError;
|
||||
|
||||
/**
|
||||
@@ -49,6 +51,7 @@ import com.mongodb.bulk.BulkWriteError;
|
||||
* @author Oliver Gierke
|
||||
* @author Michal Vich
|
||||
* @author Christoph Strobl
|
||||
* @author Brice Vandeputte
|
||||
*/
|
||||
public class MongoExceptionTranslator implements PersistenceExceptionTranslator {
|
||||
|
||||
@@ -78,6 +81,10 @@ public class MongoExceptionTranslator implements PersistenceExceptionTranslator
|
||||
throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
if (ex instanceof MongoSocketException) {
|
||||
return new DataAccessResourceFailureException(ex.getMessage(), ex);
|
||||
}
|
||||
|
||||
String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass()));
|
||||
|
||||
if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) {
|
||||
|
||||
@@ -1160,6 +1160,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1176,6 +1182,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1187,6 +1199,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return the estimated number of documents.
|
||||
@@ -1200,6 +1215,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @return the estimated number of documents.
|
||||
@@ -1214,6 +1232,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1234,11 +1258,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <p/>
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T insert(T objectToSave);
|
||||
|
||||
@@ -1249,10 +1275,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <p/>
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T insert(T objectToSave, String collectionName);
|
||||
|
||||
@@ -1295,9 +1324,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave);
|
||||
|
||||
@@ -1313,10 +1345,13 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave, String collectionName);
|
||||
|
||||
|
||||
@@ -47,6 +47,7 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
@@ -55,6 +56,7 @@ import org.springframework.data.mongodb.SessionSynchronization;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
|
||||
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
|
||||
@@ -97,12 +99,12 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.jca.cci.core.ConnectionCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
@@ -157,22 +159,12 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Yadhukrishna S Pai
|
||||
* @author Anton Barkan
|
||||
* @author Bartłomiej Mazur
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
|
||||
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
|
||||
private static final Collection<String> ITERABLE_CLASSES;
|
||||
|
||||
static {
|
||||
|
||||
Set<String> iterableClasses = new HashSet<>();
|
||||
iterableClasses.add(List.class.getName());
|
||||
iterableClasses.add(Collection.class.getName());
|
||||
iterableClasses.add(Iterator.class.getName());
|
||||
|
||||
ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses);
|
||||
}
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -606,7 +598,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> MongoCollection<Document> createCollection(Class<T> entityClass) {
|
||||
return createCollection(entityClass, CollectionOptions.empty());
|
||||
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -715,12 +707,17 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return indexOps(collectionName, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String)
|
||||
*/
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return new DefaultIndexOperations(this, collectionName, null);
|
||||
public IndexOperations indexOps(String collectionName, @Nullable Class<?> type) {
|
||||
return new DefaultIndexOperations(this, collectionName, type);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -728,7 +725,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class)
|
||||
*/
|
||||
public IndexOperations indexOps(Class<?> entityClass) {
|
||||
return new DefaultIndexOperations(this, getCollectionName(entityClass), entityClass);
|
||||
return indexOps(getCollectionName(entityClass), entityClass);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -977,7 +974,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
for (Document element : results) {
|
||||
|
||||
GeoResult<T> geoResult = callback.doWith(element);
|
||||
aggregate = aggregate.add(new BigDecimal(geoResult.getDistance().getValue()));
|
||||
aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue()));
|
||||
result.add(geoResult);
|
||||
}
|
||||
|
||||
@@ -1177,11 +1174,28 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return (T) doInsert(collectionName, objectToSave, this.mongoConverter);
|
||||
}
|
||||
|
||||
protected void ensureNotIterable(@Nullable Object o) {
|
||||
if (o != null) {
|
||||
if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
}
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead.
|
||||
*/
|
||||
protected void ensureNotIterable(@Nullable Object source) {
|
||||
ensureNotCollectionLike(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @since 3.2.
|
||||
*/
|
||||
protected void ensureNotCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (EntityOperations.isCollectionLike(source)) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1364,6 +1378,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
Assert.notNull(objectToSave, "Object to save must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
ensureNotCollectionLike(objectToSave);
|
||||
|
||||
AdaptibleEntity<T> source = operations.forEntity(objectToSave, mongoConverter.getConversionService());
|
||||
|
||||
@@ -1988,7 +2003,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
public <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {
|
||||
|
||||
return aggregate(aggregation, getCollectionName(inputType), outputType,
|
||||
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
|
||||
queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext());
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -2095,9 +2110,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
AggregationOperationContext contextToUse = new AggregationUtil(queryMapper, mappingContext)
|
||||
.prepareAggregationContext(aggregation, context);
|
||||
return doAggregate(aggregation, collectionName, outputType, contextToUse);
|
||||
return doAggregate(aggregation, collectionName, outputType,
|
||||
queryOperations.createAggregation(aggregation, context));
|
||||
}
|
||||
|
||||
private <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
AggregationDefinition context) {
|
||||
return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext());
|
||||
}
|
||||
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@@ -2185,11 +2204,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!");
|
||||
|
||||
AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context);
|
||||
AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context);
|
||||
|
||||
AggregationOptions options = aggregation.getOptions();
|
||||
List<Document> pipeline = aggregationUtil.createPipeline(aggregation, rootContext);
|
||||
List<Document> pipeline = aggregationDefinition.getAggregationPipeline();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
|
||||
@@ -2418,6 +2436,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
co.validationOptions(options);
|
||||
}
|
||||
|
||||
if (collectionOptions.containsKey("timeseries")) {
|
||||
|
||||
Document timeSeries = collectionOptions.get("timeseries", Document.class);
|
||||
com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(
|
||||
timeSeries.getString("timeField"));
|
||||
if (timeSeries.containsKey("metaField")) {
|
||||
options.metaField(timeSeries.getString("metaField"));
|
||||
}
|
||||
if (timeSeries.containsKey("granularity")) {
|
||||
options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase()));
|
||||
}
|
||||
co.timeSeriesOptions(options);
|
||||
}
|
||||
|
||||
db.createCollection(collectionName, co);
|
||||
|
||||
MongoCollection<Document> coll = db.getCollection(collectionName, Document.class);
|
||||
@@ -2572,6 +2604,19 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() //
|
||||
.ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType))));
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions)
|
||||
.ifPresent(it -> {
|
||||
|
||||
Document timeseries = new Document("timeField", it.getTimeField());
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
timeseries.append("metaField", it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
timeseries.append("granularity", it.getGranularity().name().toLowerCase());
|
||||
}
|
||||
doc.put("timeseries", timeseries);
|
||||
});
|
||||
}
|
||||
|
||||
return doc;
|
||||
@@ -2735,25 +2780,24 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callbacks to do queries against the datastore that requires reading a single object from a
|
||||
* collection of objects. It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link Document}.</li>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link Document}.</li>
|
||||
* <li>Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.</li>
|
||||
* <ol>
|
||||
*
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link Document} with
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private <T> T executeFindOneInternal(CollectionCallback<Document> collectionCallback,
|
||||
DocumentCallback<T> objectCallback, String collectionName) {
|
||||
DocumentCallback<T> documentCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
T result = objectCallback
|
||||
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)));
|
||||
return result;
|
||||
Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName));
|
||||
return document != null ? documentCallback.doWith(document) : null;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
@@ -2763,7 +2807,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callback to do queries against the datastore that requires reading a collection of objects.
|
||||
* It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
|
||||
* {@link CursorPreparer} is {@literal null}</li>
|
||||
* <li>Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
|
||||
@@ -2773,36 +2817,27 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link FindIterable} with
|
||||
* @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallback<T> objectCallback, String collectionName) {
|
||||
CursorPreparer preparer, DocumentCallback<T> documentCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
|
||||
List<T> result = new ArrayList<>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
Document object = cursor.next();
|
||||
result.add(objectCallback.doWith(object));
|
||||
result.add(documentCallback.doWith(object));
|
||||
}
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -2812,23 +2847,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private void executeQueryInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) {
|
||||
|
||||
try {
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
}
|
||||
} finally {
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -3127,8 +3151,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
interface DocumentCallback<T> {
|
||||
|
||||
@Nullable
|
||||
T doWith(@Nullable Document object);
|
||||
T doWith(Document object);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -3152,22 +3175,19 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
public T doWith(Document document) {
|
||||
|
||||
T source = null;
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
T entity = reader.read(type, document);
|
||||
|
||||
if (document != null) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
source = reader.read(type, document);
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
}
|
||||
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
source = maybeCallAfterConvert(source, document, collectionName);
|
||||
}
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
entity = maybeCallAfterConvert(entity, document, collectionName);
|
||||
|
||||
return source;
|
||||
return entity;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3200,8 +3220,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
public T doWith(Document document) {
|
||||
|
||||
if (document == null) {
|
||||
return null;
|
||||
@@ -3212,15 +3231,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));
|
||||
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
Object entity = reader.read(typeToRead, document);
|
||||
|
||||
if (result != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
result = maybeCallAfterConvert(result, document, collectionName);
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
}
|
||||
|
||||
return (T) result;
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity;
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
return (T) maybeCallAfterConvert(result, document, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3357,8 +3377,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.metric = metric;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public GeoResult<T> doWith(@Nullable Document object) {
|
||||
public GeoResult<T> doWith(Document object) {
|
||||
|
||||
double distance = Double.NaN;
|
||||
if (object.containsKey(distanceField)) {
|
||||
@@ -3385,10 +3404,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}.
|
||||
*
|
||||
* @param cursor
|
||||
* @param exceptionTranslator
|
||||
* @param objectReadCallback
|
||||
*/
|
||||
CloseableIterableCursorAdapter(MongoIterable<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DocumentCallback<T> objectReadCallback) {
|
||||
@@ -3432,8 +3447,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
try {
|
||||
Document item = cursor.next();
|
||||
T converted = objectReadCallback.doWith(item);
|
||||
return converted;
|
||||
return objectReadCallback.doWith(item);
|
||||
} catch (RuntimeException ex) {
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
}
|
||||
@@ -3459,7 +3473,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated since 3.1.4. Use {@link #getMongoDatabaseFactory()} instead.
|
||||
* @return the {@link MongoDatabaseFactory} in use.
|
||||
*/
|
||||
@Deprecated
|
||||
public MongoDatabaseFactory getMongoDbFactory() {
|
||||
return getMongoDatabaseFactory();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link MongoDatabaseFactory} in use.
|
||||
* @since 3.1.4
|
||||
*/
|
||||
public MongoDatabaseFactory getMongoDatabaseFactory() {
|
||||
return mongoDbFactory;
|
||||
}
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
@@ -31,11 +32,17 @@ import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
@@ -48,6 +55,7 @@ import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -194,9 +202,34 @@ class QueryOperations {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
return new AggregationDefinition(aggregation, inputType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext aggregationOperationContext) {
|
||||
return new AggregationDefinition(aggregation, aggregationOperationContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping fieldnames, as well as determinging and applying {@link Collation collations}.
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@@ -205,7 +238,7 @@ class QueryOperations {
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create new a {@link QueryContext} instance from the given {@literal query} (can be eihter a {@link Query} or a
|
||||
* Create new a {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
|
||||
* plain {@link Document}.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
@@ -258,7 +291,21 @@ class QueryOperations {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
Document fields = new Document();
|
||||
|
||||
for (Entry<String, Object> entry : query.getFieldsObject().entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof MongoExpression) {
|
||||
|
||||
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
||||
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
||||
|
||||
fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
} else {
|
||||
fields.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
@@ -275,7 +322,7 @@ class QueryOperations {
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
@@ -341,7 +388,8 @@ class QueryOperations {
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
@@ -658,7 +706,8 @@ class QueryOperations {
|
||||
: mappedDocument != null ? mappedDocument.getDocument() : getMappedUpdate(domainType);
|
||||
|
||||
Document filterWithShardKey = new Document(filter);
|
||||
getMappedShardKeyFields(domainType).forEach(key -> filterWithShardKey.putIfAbsent(key, shardKeySource.get(key)));
|
||||
getMappedShardKeyFields(domainType)
|
||||
.forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue(shardKeySource, key)));
|
||||
|
||||
return filterWithShardKey;
|
||||
}
|
||||
@@ -709,7 +758,8 @@ class QueryOperations {
|
||||
|
||||
Class<?> type = domainType != null ? domainType : Object.class;
|
||||
|
||||
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, queryMapper);
|
||||
AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext,
|
||||
queryMapper);
|
||||
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
|
||||
}
|
||||
|
||||
@@ -759,4 +809,105 @@ class QueryOperations {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A value object that encapsulates common tasks required when running {@literal aggregations}.
|
||||
*
|
||||
* @since 3.2
|
||||
*/
|
||||
class AggregationDefinition {
|
||||
|
||||
private final Aggregation aggregation;
|
||||
private final Lazy<AggregationOperationContext> aggregationOperationContext;
|
||||
private final Lazy<List<Document>> pipeline;
|
||||
private final @Nullable Class<?> inputType;
|
||||
|
||||
/**
|
||||
* Creates new instance of {@link AggregationDefinition} extracting the input type from either the
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
|
||||
* the given {@literal aggregationOperationContext} if present. <br />
|
||||
* Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and
|
||||
* the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
|
||||
* Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
|
||||
*
|
||||
* @param aggregation the source aggregation.
|
||||
* @param aggregationOperationContext can be {@literal null}.
|
||||
*/
|
||||
AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) {
|
||||
|
||||
this.aggregation = aggregation;
|
||||
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
|
||||
this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
|
||||
} else {
|
||||
this.inputType = null;
|
||||
}
|
||||
|
||||
this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext
|
||||
: aggregationUtil.createAggregationContext(aggregation, getInputType()));
|
||||
this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new instance of {@link AggregationDefinition} extracting the input type from either the
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
|
||||
* the given {@literal aggregationOperationContext} if present. <br />
|
||||
* Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired
|
||||
* {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
|
||||
* Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
|
||||
*
|
||||
* @param aggregation the source aggregation.
|
||||
* @param inputType can be {@literal null}.
|
||||
*/
|
||||
AggregationDefinition(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
|
||||
this.aggregation = aggregation;
|
||||
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
} else {
|
||||
this.inputType = inputType;
|
||||
}
|
||||
|
||||
this.aggregationOperationContext = Lazy
|
||||
.of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType()));
|
||||
this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the already mapped pipeline.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getAggregationPipeline() {
|
||||
return pipeline.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}.
|
||||
* @see AggregationPipeline#isOutOrMerge()
|
||||
*/
|
||||
boolean isOutOrMerge() {
|
||||
return aggregation.getPipeline().isOutOrMerge();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link AggregationOperationContext} used for mapping the pipeline.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
AggregationOperationContext getAggregationOperationContext() {
|
||||
return aggregationOperationContext.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the input type to map the pipeline against. Can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
Class<?> getInputType() {
|
||||
return inputType;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,6 +106,12 @@ public interface ReactiveFindOperation {
|
||||
|
||||
/**
|
||||
* Get the number of matching elements.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
|
||||
* guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications
|
||||
* needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @return {@link Mono} emitting total number of matching elements. Never {@literal null}.
|
||||
*/
|
||||
|
||||
@@ -940,6 +940,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -956,6 +962,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -971,6 +983,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -983,6 +1001,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param entityClass must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting the estimated number of documents.
|
||||
@@ -996,6 +1017,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @return a {@link Mono} emitting the estimated number of documents.
|
||||
@@ -1014,11 +1038,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> Mono<T> insert(T objectToSave);
|
||||
|
||||
@@ -1029,10 +1055,13 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> Mono<T> insert(T objectToSave, String collectionName);
|
||||
|
||||
@@ -1074,7 +1103,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1121,9 +1149,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> Mono<T> save(T objectToSave);
|
||||
|
||||
@@ -1143,6 +1174,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> Mono<T> save(T objectToSave, String collectionName);
|
||||
|
||||
|
||||
@@ -17,13 +17,20 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.function.Tuple2;
|
||||
import reactor.util.function.Tuples;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
@@ -53,6 +60,7 @@ import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -62,6 +70,7 @@ import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.SessionSynchronization;
|
||||
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
|
||||
@@ -71,6 +80,7 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.DbRefResolver;
|
||||
@@ -100,6 +110,7 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
@@ -156,18 +167,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class);
|
||||
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
|
||||
private static final Collection<Class<?>> ITERABLE_CLASSES;
|
||||
|
||||
static {
|
||||
|
||||
Set<Class<?>> iterableClasses = new HashSet<>();
|
||||
iterableClasses.add(List.class);
|
||||
iterableClasses.add(Collection.class);
|
||||
iterableClasses.add(Iterator.class);
|
||||
iterableClasses.add(Publisher.class);
|
||||
|
||||
ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses);
|
||||
}
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -667,7 +666,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass) {
|
||||
return createCollection(entityClass, CollectionOptions.empty());
|
||||
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -946,9 +945,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
|
||||
AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
|
||||
mappingContext, queryMapper);
|
||||
return aggregate(aggregation, inputCollectionName, outputType, context);
|
||||
return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -966,9 +963,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
*/
|
||||
@Override
|
||||
public <O> Flux<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {
|
||||
|
||||
return aggregate(aggregation, getCollectionName(inputType), outputType,
|
||||
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
|
||||
return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -977,45 +972,34 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
*/
|
||||
@Override
|
||||
public <O> Flux<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType) {
|
||||
return aggregate(aggregation, collectionName, outputType, null);
|
||||
return doAggregate(aggregation, collectionName, null, outputType);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param collectionName must not be {@literal null}.
|
||||
* @param outputType must not be {@literal null}.
|
||||
* @param context can be {@literal null} and will be defaulted to {@link Aggregation#DEFAULT_CONTEXT}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
protected <O> Flux<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
@Nullable AggregationOperationContext context) {
|
||||
protected <O> Flux<O> doAggregate(Aggregation aggregation, String collectionName, @Nullable Class<?> inputType,
|
||||
Class<O> outputType) {
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context);
|
||||
|
||||
AggregationOptions options = aggregation.getOptions();
|
||||
List<Document> pipeline = aggregationUtil.createPipeline(aggregation, rootContext);
|
||||
|
||||
Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!");
|
||||
|
||||
AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()),
|
||||
collectionName);
|
||||
}
|
||||
|
||||
ReadDocumentCallback<O> readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName);
|
||||
return execute(collectionName,
|
||||
collection -> aggregateAndMap(collection, pipeline, aggregation.getPipeline().isOutOrMerge(), options,
|
||||
readCallback,
|
||||
aggregation instanceof TypedAggregation ? ((TypedAggregation<?>) aggregation).getInputType() : null));
|
||||
return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(),
|
||||
ctx.isOutOrMerge(), options, readCallback, ctx.getInputType()));
|
||||
}
|
||||
|
||||
private <O> Flux<O> aggregateAndMap(MongoCollection<Document> collection, List<Document> pipeline,
|
||||
boolean isOutOrMerge,
|
||||
AggregationOptions options, ReadDocumentCallback<O> readCallback, @Nullable Class<?> inputType) {
|
||||
boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback<O> readCallback,
|
||||
@Nullable Class<?> inputType) {
|
||||
|
||||
AggregatePublisher<Document> cursor = collection.aggregate(pipeline, Document.class)
|
||||
.allowDiskUse(options.isAllowDiskUse());
|
||||
@@ -2522,6 +2506,20 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
result.validationOptions(validationOptions);
|
||||
});
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(operations.forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> {
|
||||
|
||||
TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField());
|
||||
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
options.metaField(it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase()));
|
||||
}
|
||||
|
||||
result.timeSeriesOptions(options);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -2682,13 +2680,27 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
}
|
||||
}
|
||||
|
||||
protected void ensureNotIterable(Object o) {
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead.
|
||||
*/
|
||||
protected void ensureNotIterable(@Nullable Object source) {
|
||||
ensureNotCollectionLike(source);
|
||||
}
|
||||
|
||||
boolean isIterable = o.getClass().isArray()
|
||||
|| ITERABLE_CLASSES.stream().anyMatch(iterableClass -> iterableClass.isAssignableFrom(o.getClass())
|
||||
|| o.getClass().getName().equals(iterableClass.getName()));
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @since 3.2.
|
||||
*/
|
||||
protected void ensureNotCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (isIterable) {
|
||||
if (EntityOperations.isCollectionLike(source) || source instanceof Publisher) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
}
|
||||
}
|
||||
@@ -2730,6 +2742,14 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
return potentiallyForceAcknowledgedWrite(wc);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link MongoDatabaseFactory} in use.
|
||||
* @since 3.1.4
|
||||
*/
|
||||
public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() {
|
||||
return mongoDatabaseFactory;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) {
|
||||
|
||||
@@ -3157,13 +3177,14 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
|
||||
T source = reader.read(type, document);
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
return maybeCallAfterConvert(source, document, collectionName);
|
||||
T entity = reader.read(type, document);
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
return maybeCallAfterConvert(entity, document, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3201,16 +3222,17 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName));
|
||||
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
Object entity = reader.read(typeToRead, document);
|
||||
|
||||
T castEntity = (T) result;
|
||||
if (castEntity != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
|
||||
return maybeCallAfterConvert(castEntity, document, collectionName);
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
}
|
||||
|
||||
return Mono.empty();
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity;
|
||||
|
||||
T castEntity = (T) result;
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
|
||||
return maybeCallAfterConvert(castEntity, document, collectionName);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
|
||||
/**
|
||||
* An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like
|
||||
@@ -25,7 +26,37 @@ import org.bson.Document;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface AggregationExpression {
|
||||
public interface AggregationExpression extends MongoExpression {
|
||||
|
||||
/**
|
||||
* Create an {@link AggregationExpression} out of a given {@link MongoExpression} to ensure the resulting
|
||||
* {@link MongoExpression#toDocument() Document} is mapped against the {@link AggregationOperationContext}. <br />
|
||||
* If the given expression is already an {@link AggregationExpression} the very same instance is returned.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.2
|
||||
*/
|
||||
static AggregationExpression from(MongoExpression expression) {
|
||||
|
||||
if (expression instanceof AggregationExpression) {
|
||||
return AggregationExpression.class.cast(expression);
|
||||
}
|
||||
|
||||
return (context) -> context.getMappedObject(expression.toDocument());
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the as is (unmapped) representation of the {@link AggregationExpression}. Use
|
||||
* {@link #toDocument(AggregationOperationContext)} with a matching {@link AggregationOperationContext context} to
|
||||
* engage domain type mapping including field name resolution.
|
||||
*
|
||||
* @see org.springframework.data.mongodb.MongoExpression#toDocument()
|
||||
*/
|
||||
@Override
|
||||
default Document toDocument() {
|
||||
return toDocument(Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationExpression} into a {@link Document} within the given
|
||||
|
||||
@@ -56,6 +56,7 @@ public class AggregationOptions {
|
||||
private final Optional<Document> hint;
|
||||
private Duration maxTime = Duration.ZERO;
|
||||
private ResultOptions resultOptions = ResultOptions.READ;
|
||||
private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
@@ -261,6 +262,14 @@ public class AggregationOptions {
|
||||
return ResultOptions.SKIP.equals(resultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the domain type mapping strategy do apply. Never {@literal null}.
|
||||
* @since 3.2
|
||||
*/
|
||||
public DomainTypeMapping getDomainTypeMapping() {
|
||||
return domainTypeMapping;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
@@ -358,6 +367,7 @@ public class AggregationOptions {
|
||||
private @Nullable Document hint;
|
||||
private @Nullable Duration maxTime;
|
||||
private @Nullable ResultOptions resultOptions;
|
||||
private @Nullable DomainTypeMapping domainTypeMapping;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
@@ -475,6 +485,44 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a strict domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field}
|
||||
* annotations throwing errors for non-existent, but referenced fields.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder strictMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.STRICT;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a relaxed domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field}
|
||||
* annotations using the user provided name if a referenced field does not exist.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder relaxedMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.RELAXED;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply no domain type mapping at all taking the pipeline as-is.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder noMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.NONE;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions} instance with the given configuration.
|
||||
*
|
||||
@@ -489,6 +537,9 @@ public class AggregationOptions {
|
||||
if (resultOptions != null) {
|
||||
options.resultOptions = resultOptions;
|
||||
}
|
||||
if (domainTypeMapping != null) {
|
||||
options.domainTypeMapping = domainTypeMapping;
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
@@ -508,4 +559,27 @@ public class AggregationOptions {
|
||||
*/
|
||||
READ;
|
||||
}
|
||||
|
||||
/**
|
||||
* Aggregation pipeline Domain type mappings supported by the mapping layer.
|
||||
*
|
||||
* @since 3.2
|
||||
*/
|
||||
public enum DomainTypeMapping {
|
||||
|
||||
/**
|
||||
* Mapping throws errors for non-existent, but referenced fields.
|
||||
*/
|
||||
STRICT,
|
||||
|
||||
/**
|
||||
* Fields that do not exist in the model are treated as-is.
|
||||
*/
|
||||
RELAXED,
|
||||
|
||||
/**
|
||||
* Do not attempt to map fields against the model and treat the entire pipeline as-is.
|
||||
*/
|
||||
NONE
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
|
||||
@@ -55,7 +56,7 @@ public class RelaxedTypeBasedAggregationOperationContext extends TypeBasedAggreg
|
||||
|
||||
try {
|
||||
return super.getReferenceFor(field);
|
||||
} catch (InvalidPersistentPropertyPath e) {
|
||||
} catch (MappingException e) {
|
||||
return new DirectFieldReference(new ExposedField(field, true));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -166,4 +166,8 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
|
||||
return new DirectFieldReference(new ExposedField(mappedField, true));
|
||||
}
|
||||
|
||||
public Class<?> getType() {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.DBRef;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public interface DbRefResolver {
|
||||
public interface DbRefResolver extends ReferenceResolver {
|
||||
|
||||
/**
|
||||
* Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method
|
||||
|
||||
@@ -15,13 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
@@ -29,28 +22,18 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
@@ -67,13 +50,11 @@ import com.mongodb.client.model.Filters;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}.
|
||||
@@ -82,11 +63,11 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator());
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
this.objenesis = new ObjenesisStd(true);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -114,17 +95,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
@Override
|
||||
public Document fetch(DBRef dbRef) {
|
||||
|
||||
MongoCollection<Document> mongoCollection = getCollection(dbRef);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName()
|
||||
: mongoCollection.getNamespace().getDatabaseName(),
|
||||
dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
return mongoCollection.find(Filters.eq("_id", dbRef.getId())).first();
|
||||
return getReferenceLoader().fetchOne(DocumentReferenceQuery.forSingleDocument(Filters.eq("_id", dbRef.getId())),
|
||||
ReferenceCollection.fromDBRef(dbRef));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -165,7 +137,7 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
}
|
||||
|
||||
List<Document> result = mongoCollection //
|
||||
.find(new Document("_id", new Document("$in", ids))) //
|
||||
.find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) //
|
||||
.into(new ArrayList<>());
|
||||
|
||||
return ids.stream() //
|
||||
@@ -185,44 +157,9 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
DbRefResolverCallback callback, DbRefProxyHandler handler) {
|
||||
|
||||
Class<?> propertyType = property.getType();
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback);
|
||||
Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref);
|
||||
|
||||
if (!propertyType.isInterface()) {
|
||||
|
||||
Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
|
||||
return handler.populateId(property, dbref, factory);
|
||||
}
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
proxyFactory.addInterface(LazyLoadingProxy.class);
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
|
||||
return handler.populateId(property, dbref, proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CGLib enhanced type for the given source type.
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getEnhancedTypeFor(Class<?> type) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
return enhancer.createClass();
|
||||
return handler.populateId(property, dbref, lazyLoadingProxy);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -245,253 +182,10 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
private static Stream<Document> documentWithId(Object identifier, Collection<Document> documents) {
|
||||
|
||||
return documents.stream() //
|
||||
.filter(it -> it.get("_id").equals(identifier)) //
|
||||
.filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) //
|
||||
.limit(1);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a
|
||||
* {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is
|
||||
* guaranteed to be performed only once.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class LazyLoadingInterceptor
|
||||
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private volatile boolean resolved;
|
||||
private final @Nullable DBRef dbref;
|
||||
private @Nullable Object result;
|
||||
|
||||
static {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
|
||||
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param dbref can be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
*/
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
this.dbref = dbref;
|
||||
this.callback = callback;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Override
|
||||
public Object invoke(@Nullable MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public Object intercept(Object obj, Method method, Object[] args, @Nullable MethodProxy proxy) throws Throwable {
|
||||
|
||||
if (INITIALIZE_METHOD.equals(method)) {
|
||||
return ensureResolved();
|
||||
}
|
||||
|
||||
if (TO_DBREF_METHOD.equals(method)) {
|
||||
return this.dbref;
|
||||
}
|
||||
|
||||
if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {
|
||||
|
||||
if (ReflectionUtils.isToStringMethod(method)) {
|
||||
return proxyToString(proxy);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isEqualsMethod(method)) {
|
||||
return proxyEquals(proxy, args[0]);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode(proxy);
|
||||
}
|
||||
|
||||
// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
|
||||
if (FINALIZE_METHOD.equals(method)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
if (target == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
|
||||
return method.invoke(target, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a to string representation for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private String proxyToString(@Nullable Object proxy) {
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (dbref != null) {
|
||||
description.append(dbref.getCollectionName());
|
||||
description.append(":");
|
||||
description.append(dbref.getId());
|
||||
} else {
|
||||
description.append(System.identityHashCode(proxy));
|
||||
}
|
||||
description.append("$").append(LazyLoadingProxy.class.getSimpleName());
|
||||
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hashcode for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private int proxyHashCode(@Nullable Object proxy) {
|
||||
return proxyToString(proxy).hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an equality check for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @param that
|
||||
* @return
|
||||
*/
|
||||
private boolean proxyEquals(@Nullable Object proxy, Object that) {
|
||||
|
||||
if (!(that instanceof LazyLoadingProxy)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (that == proxy) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return proxyToString(proxy).equals(that.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Will trigger the resolution if the proxy is not resolved already or return a previously resolved result.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for serialization.
|
||||
*
|
||||
* @param out
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for deserialization.
|
||||
*
|
||||
* @param in
|
||||
* @throws IOException
|
||||
*/
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true;
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the proxy into its backing object.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (resolved) {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Accessing already resolved lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Resolving lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
|
||||
if (translatedException instanceof ClientSessionException) {
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
|
||||
}
|
||||
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
|
||||
translatedException != null ? translatedException : ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook for obtaining the {@link MongoCollection} for a given {@link DBRef}.
|
||||
*
|
||||
@@ -504,4 +198,10 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory)
|
||||
.getCollection(dbref.getCollectionName(), Document.class);
|
||||
}
|
||||
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,113 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity
|
||||
* associations expressed via a {@link MongoPersistentProperty persitent property}. Creates {@link LazyLoadingProxy
|
||||
* proxies} for associations that should be lazily loaded.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DefaultReferenceResolver implements ReferenceResolver {
|
||||
|
||||
private final ReferenceLoader referenceLoader;
|
||||
private final LazyLoadingProxyFactory proxyFactory;
|
||||
|
||||
private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx);
|
||||
private final LookupFunction singleValueLookupFunction = (filter, ctx) -> {
|
||||
Object target = getReferenceLoader().fetchOne(filter, ctx);
|
||||
return target == null ? Collections.emptyList() : Collections.singleton(getReferenceLoader().fetchOne(filter, ctx));
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DefaultReferenceResolver}.
|
||||
*
|
||||
* @param referenceLoader must not be {@literal null}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(referenceLoader, "ReferenceLoader must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null!");
|
||||
|
||||
this.referenceLoader = referenceLoader;
|
||||
this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
|
||||
LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction
|
||||
: singleValueLookupFunction;
|
||||
|
||||
if (isLazyReference(property)) {
|
||||
return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily.
|
||||
*
|
||||
* @param property
|
||||
* @return return {@literal true} if the defined association is lazy.
|
||||
* @see DBRef#lazy()
|
||||
* @see DocumentReference#lazy()
|
||||
*/
|
||||
protected boolean isLazyReference(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return property.getDocumentReference().lazy();
|
||||
}
|
||||
|
||||
return property.getDBRef() != null && property.getDBRef().lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} executing the lookup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
protected ReferenceLoader getReferenceLoader() {
|
||||
return referenceLoader;
|
||||
}
|
||||
|
||||
LazyLoadingProxyFactory getProxyFactory() {
|
||||
return proxyFactory;
|
||||
}
|
||||
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) {
|
||||
return proxyFactory.createLazyLoadingProxy(property, it -> {
|
||||
return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader);
|
||||
}, source);
|
||||
}
|
||||
}
|
||||
@@ -21,13 +21,13 @@ import java.util.Map;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.BasicDBObject;
|
||||
import com.mongodb.DBObject;
|
||||
|
||||
/**
|
||||
@@ -67,6 +67,19 @@ class DocumentAccessor {
|
||||
return this.document;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies all of the mappings from the given {@link Document} to the underlying target {@link Document}. These
|
||||
* mappings will replace any mappings that the target document had for any of the keys currently in the specified map.
|
||||
*
|
||||
* @param source
|
||||
*/
|
||||
public void putAll(Document source) {
|
||||
|
||||
Map<String, Object> target = BsonUtils.asMap(document);
|
||||
|
||||
target.putAll(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts the given value into the backing {@link Document} based on the coordinates defined through the given
|
||||
* {@link MongoPersistentProperty}. By default this will be the plain field name. But field names might also consist
|
||||
@@ -110,28 +123,7 @@ class DocumentAccessor {
|
||||
*/
|
||||
@Nullable
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
Map<String, Object> map = BsonUtils.asMap(document);
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
return map.get(fieldName);
|
||||
}
|
||||
|
||||
Iterator<String> parts = Arrays.asList(fieldName.split("\\.")).iterator();
|
||||
Map<String, Object> source = map;
|
||||
Object result = null;
|
||||
|
||||
while (source != null && parts.hasNext()) {
|
||||
|
||||
result = source.get(parts.next());
|
||||
|
||||
if (parts.hasNext()) {
|
||||
source = getAsMap(result);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return BsonUtils.resolveValue(document, property.getFieldName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -141,6 +133,7 @@ class DocumentAccessor {
|
||||
* @param entity must not be {@literal null}.
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
public Object getRawId(MongoPersistentEntity<?> entity) {
|
||||
return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id");
|
||||
}
|
||||
@@ -157,71 +150,7 @@ class DocumentAccessor {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
|
||||
String fieldName = property.getFieldName();
|
||||
|
||||
|
||||
if (this.document instanceof Document) {
|
||||
|
||||
if (((Document) this.document).containsKey(fieldName)) {
|
||||
return true;
|
||||
}
|
||||
} else if (this.document instanceof DBObject) {
|
||||
if (((DBObject) this.document).containsField(fieldName)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String[] parts = fieldName.split("\\.");
|
||||
Map<String, Object> source;
|
||||
|
||||
if (this.document instanceof Document) {
|
||||
source = ((Document) this.document);
|
||||
} else {
|
||||
source = ((DBObject) this.document).toMap();
|
||||
}
|
||||
|
||||
Object result = null;
|
||||
|
||||
for (int i = 1; i < parts.length; i++) {
|
||||
|
||||
result = source.get(parts[i - 1]);
|
||||
source = getAsMap(result);
|
||||
|
||||
if (source == null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return source.containsKey(parts[parts.length - 1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the given source object as map, i.e. {@link Document}s and maps as is or {@literal null} otherwise.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
private static Map<String, Object> getAsMap(Object source) {
|
||||
|
||||
if (source instanceof Document) {
|
||||
return (Document) source;
|
||||
}
|
||||
|
||||
if (source instanceof BasicDBObject) {
|
||||
return (BasicDBObject) source;
|
||||
}
|
||||
|
||||
if (source instanceof Map) {
|
||||
return (Map<String, Object>) source;
|
||||
}
|
||||
|
||||
return null;
|
||||
return BsonUtils.hasValue(document, property.getFieldName());
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,237 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.WeakHashMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
/**
|
||||
* Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy},
|
||||
* registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter},
|
||||
* simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
class DocumentPointerFactory {

	private final ConversionService conversionService;
	private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;

	// Parsed lookup expressions keyed by the raw lookup string. A WeakHashMap lets an
	// entry be collected once its lookup-string key is otherwise unreferenced.
	private final Map<String, LinkageDocument> cache;

	/**
	 * A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of
	 * <code>{'_id' : ?#{#target} }</code>.
	 */
	private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt)
			"['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id"
			"?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces
			"['\"]?\\?#\\{#target\\}['\"]?" + // leading to the potentially quoted ?#{#target} expression
			"\\s*}"); // some optional whitespaces and document close

	DocumentPointerFactory(ConversionService conversionService,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

		this.conversionService = conversionService;
		this.mappingContext = mappingContext;
		this.cache = new WeakHashMap<>();
	}

	/**
	 * Compute the {@link DocumentPointer} for the given reference {@code value}, trying in order: the proxy's
	 * original source, a registered converter, the plain identifier (default lookup), and finally an inversion
	 * of the custom lookup query.
	 *
	 * @param mappingContext used to resolve entities and property paths.
	 * @param property the reference-defining property.
	 * @param value the referenced object (or proxy).
	 * @param typeHint type used to probe the {@link ConversionService}.
	 * @return never {@literal null}.
	 */
	DocumentPointer<?> computePointer(
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
			MongoPersistentProperty property, Object value, Class<?> typeHint) {

		// A lazy proxy already carries the raw linkage value it was created from — no recomputation needed.
		if (value instanceof LazyLoadingProxy) {
			return () -> ((LazyLoadingProxy) value).getSource();
		}

		// A user-registered Object -> DocumentPointer converter takes precedence.
		// NOTE(review): canConvert is probed with typeHint but convert is invoked with value —
		// assumes typeHint matches value's conversion type; confirm against callers.
		if (conversionService.canConvert(typeHint, DocumentPointer.class)) {
			return conversionService.convert(value, DocumentPointer.class);
		}

		MongoPersistentEntity<?> persistentEntity = mappingContext
				.getRequiredPersistentEntity(property.getAssociationTargetType());

		// Default '{_id : ?#{#target}}' lookup: the pointer is simply the target entity's id.
		if (usesDefaultLookup(property)) {
			return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier();
		}

		// Custom lookup: read the referenced properties off the value so the lookup can be inverted.
		MongoPersistentEntity<?> valueEntity = mappingContext.getPersistentEntity(value.getClass());
		PersistentPropertyAccessor<Object> propertyAccessor;
		if (valueEntity == null) {
			// Not a mapped entity — fall back to plain bean-property access.
			propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value);
		} else {
			propertyAccessor = valueEntity.getPropertyPathAccessor(value);
		}

		// Parse (and cache) the lookup expression, then apply the value's properties to it.
		return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from)
				.getDocumentPointer(mappingContext, persistentEntity, propertyAccessor);
	}

	/**
	 * Whether the property's lookup is the default {@code {'_id' : ?#{#target}}} form (or a plain
	 * {@link Reference} annotation, which implies it).
	 *
	 * @param property the reference-defining property.
	 * @return {@literal true} for the default id-based lookup.
	 * @throws IllegalStateException if the property defines no reference at all.
	 */
	private boolean usesDefaultLookup(MongoPersistentProperty property) {

		if (property.isDocumentReference()) {
			return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches();
		}

		// A plain @Reference (no @DocumentReference) has no custom lookup, hence the default applies.
		Reference atReference = property.findAnnotation(Reference.class);
		if (atReference != null) {
			return true;
		}

		throw new IllegalStateException(String.format("%s does not seem to be define Reference", property));
	}

	/**
	 * Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and
	 * inverting it.
	 *
	 * <pre class="code">
	 * // source
	 * { 'firstname' : ?#{fn}, 'lastname' : ?#{ln} }
	 *
	 * // target
	 * { 'fn' : ..., 'ln' : ... }
	 * </pre>
	 *
	 * The actual pointer is then computed via
	 * {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from
	 * the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions
	 * from the source.
	 */
	static class LinkageDocument {

		// Matches SpEL-style expressions like ?#{#target.field} capturing the field name.
		static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w\\d\\.\\-)]*)\\}");
		// Matches the synthetic placeholders ('###_<index>_###') substituted for expressions below.
		static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?<index>\\d*)_###");

		private final String lookup;
		// The lookup parsed as a Document with every expression replaced by a placeholder string.
		private final org.bson.Document documentPointer;
		// Maps each placeholder back to the field name its expression referred to.
		private final Map<String, String> placeholderMap;

		static LinkageDocument from(String lookup) {
			return new LinkageDocument(lookup);
		}

		private LinkageDocument(String lookup) {

			this.lookup = lookup;
			this.placeholderMap = new LinkedHashMap<>();

			int index = 0;
			Matcher matcher = EXPRESSION_PATTERN.matcher(lookup);
			String targetLookup = lookup;

			// Replace each ?#{...} expression with a unique quoted placeholder so the
			// lookup becomes parseable JSON while remembering placeholder -> field name.
			while (matcher.find()) {

				String expression = matcher.group();
				// '#target.' prefixes address the referencing entity; strip to the bare field name.
				String fieldName = matcher.group("fieldName").replace("target.", "");

				String placeholder = placeholder(index);
				placeholderMap.put(placeholder, fieldName);
				targetLookup = targetLookup.replace(expression, "'" + placeholder + "'");
				index++;
			}

			this.documentPointer = org.bson.Document.parse(targetLookup);
		}

		private String placeholder(int index) {
			return "###_" + index + "_###";
		}

		// NOTE(review): not referenced from the code visible here; PLACEHOLDER_PATTERN lookups
		// in updatePlaceholders go through placeholderMap.containsKey instead.
		private boolean isPlaceholder(String key) {
			return PLACEHOLDER_PATTERN.matcher(key).matches();
		}

		DocumentPointer<Object> getDocumentPointer(
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
				MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
			return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity,
					propertyAccessor);
		}

		/**
		 * Recursively copy {@code source} into {@code target}, replacing each placeholder value with the
		 * corresponding property value read from {@code propertyAccessor}.
		 */
		Document updatePlaceholders(org.bson.Document source, org.bson.Document target,
				MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
				MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {

			for (Entry<String, Object> entry : source.entrySet()) {

				// Query operators ($eq, $or, ...) cannot be inverted into a stored pointer document.
				if (entry.getKey().startsWith("$")) {
					throw new InvalidDataAccessApiUsageException(String.format(
							"Cannot derive document pointer from lookup '%s' using query operator (%s). Please consider registering a custom converter.",
							lookup, entry.getKey()));
				}

				// Nested documents recurse; descend into the nested entity when the key maps to one.
				if (entry.getValue() instanceof Document) {

					MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey());
					if (persistentProperty != null && persistentProperty.isEntity()) {

						MongoPersistentEntity<?> nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType());
						target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
								nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty))));
					} else {
						target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
								persistentEntity, propertyAccessor));
					}
					continue;
				}

				// A placeholder value: resolve the actual property value and store it under the
				// expression's field name (inverting key and expression from the lookup).
				if (placeholderMap.containsKey(entry.getValue())) {

					String attribute = placeholderMap.get(entry.getValue());
					if (attribute.contains(".")) {
						attribute = attribute.substring(attribute.lastIndexOf('.') + 1);
					}

					// '_id' addresses the id property, exposed as 'id' on the entity model.
					String fieldName = entry.getKey().equals("_id") ? "id" : entry.getKey();
					if (!fieldName.contains(".")) {

						Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName));
						target.put(attribute, targetValue);
						continue;
					}

					// Dotted path — resolve through the mapping context's property path machinery.
					PersistentPropertyPath<?> path = mappingContext
							.getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation()));
					Object targetValue = propertyAccessor.getProperty(path);
					target.put(attribute, targetValue);
					continue;
				}

				// Plain literal value — copy through unchanged.
				target.put(entry.getKey(), entry.getValue());
			}
			return target;
		}
	}
}
|
||||
@@ -15,18 +15,18 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Allows direct interaction with the underlying {@link LazyLoadingInterceptor}.
|
||||
* Allows direct interaction with the underlying {@code LazyLoadingInterceptor}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.5
|
||||
* @see LazyLoadingProxyFactory
|
||||
*/
|
||||
public interface LazyLoadingProxy {
|
||||
|
||||
@@ -46,4 +46,15 @@ public interface LazyLoadingProxy {
|
||||
*/
|
||||
@Nullable
|
||||
DBRef toDBRef();
|
||||
|
||||
/**
|
||||
* Returns the raw {@literal source} object that defines the reference.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
default Object getSource() {
|
||||
return toDBRef();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,303 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class LazyLoadingProxyFactory {

	private static final Logger LOGGER = LoggerFactory.getLogger(LazyLoadingProxyFactory.class);

	private final ObjenesisStd objenesis;

	private final PersistenceExceptionTranslator exceptionTranslator;

	public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) {
		this.exceptionTranslator = exceptionTranslator;
		// 'true' enables Objenesis' class/instantiator caching.
		this.objenesis = new ObjenesisStd(true);
	}

	/**
	 * Create a lazy loading proxy for the given property that resolves its value through {@code callback} on first
	 * access. Concrete classes are proxied via a CGLib subclass (instantiated without invoking a constructor),
	 * interfaces via a JDK/Spring AOP proxy.
	 *
	 * @param property the association-defining property; its type determines the proxy strategy.
	 * @param callback invoked once to resolve the actual value.
	 * @param source the raw source value (e.g. a {@link DBRef}) backing the reference.
	 * @return the proxy instance.
	 */
	public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback,
			Object source) {

		Class<?> propertyType = property.getType();
		LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator);

		if (!propertyType.isInterface()) {

			// Concrete class: subclass it via CGLib and instantiate without calling a constructor.
			Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
			factory.setCallbacks(new Callback[] { interceptor });

			return factory;
		}

		ProxyFactory proxyFactory = new ProxyFactory();

		for (Class<?> type : propertyType.getInterfaces()) {
			proxyFactory.addInterface(type);
		}

		// Every proxy also implements LazyLoadingProxy to expose getTarget()/toDBRef()/getSource().
		proxyFactory.addInterface(LazyLoadingProxy.class);
		proxyFactory.addInterface(propertyType);
		proxyFactory.addAdvice(interceptor);

		return proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader());
	}

	/**
	 * Returns the CGLib enhanced type for the given source type.
	 *
	 * @param type the concrete class to subclass.
	 * @return the generated proxy class (no instance is created here).
	 */
	private Class<?> getEnhancedTypeFor(Class<?> type) {

		Enhancer enhancer = new Enhancer();
		enhancer.setSuperclass(type);
		enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
		enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });

		return enhancer.createClass();
	}

	/**
	 * Interceptor backing both proxy flavors: resolves the reference on first non-trivial method call,
	 * caches the result, and answers {@link LazyLoadingProxy} and {@link Object} methods without
	 * triggering resolution.
	 */
	public static class LazyLoadingInterceptor
			implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {

		// Methods recognized by identity in intercept(); resolved once, statically.
		private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD;

		static {
			try {
				INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
				TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
				FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
				GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource");
			} catch (Exception e) {
				throw new RuntimeException(e);
			}
		}

		private final MongoPersistentProperty property;
		private final DbRefResolverCallback callback;
		private final Object source;
		private final PersistenceExceptionTranslator exceptionTranslator;
		// volatile: resolution state must be visible across threads; see ensureResolved()/resolve().
		private volatile boolean resolved;
		private @Nullable Object result;

		public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source,
				PersistenceExceptionTranslator exceptionTranslator) {

			this.property = property;
			this.callback = callback;
			this.source = source;
			this.exceptionTranslator = exceptionTranslator;
		}

		// AOP Alliance entry point (interface proxies) — delegates to the CGLib-style intercept().
		@Nullable
		@Override
		public Object invoke(MethodInvocation invocation) throws Throwable {
			return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
		}

		@Nullable
		@Override
		public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable {

			// getTarget() forces resolution and returns the resolved value.
			if (INITIALIZE_METHOD.equals(method)) {
				return ensureResolved();
			}

			if (TO_DBREF_METHOD.equals(method)) {
				return source instanceof DBRef ? source : null;
			}

			if (GET_SOURCE_METHOD.equals(method)) {
				return source;
			}

			// Object methods are answered locally so they never trigger a database round trip.
			if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {

				if (ReflectionUtils.isToStringMethod(method)) {
					return proxyToString(source);
				}

				if (ReflectionUtils.isEqualsMethod(method)) {
					return proxyEquals(o, args[0]);
				}

				if (ReflectionUtils.isHashCodeMethod(method)) {
					return proxyHashCode();
				}

				// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
				if (FINALIZE_METHOD.equals(method)) {
					return null;
				}
			}

			// Any other method requires the actual target.
			Object target = ensureResolved();

			if (target == null) {
				return null;
			}

			ReflectionUtils.makeAccessible(method);

			return method.invoke(target, args);
		}

		// Resolves lazily and memoizes; resolve() itself is synchronized and re-checks 'resolved'.
		@Nullable
		private Object ensureResolved() {

			if (!resolved) {
				this.result = resolve();
				this.resolved = true;
			}

			return this.result;
		}

		// Identity string based on the source reference: 'collection:id$LazyLoadingProxy' for DBRefs.
		private String proxyToString(@Nullable Object source) {

			StringBuilder description = new StringBuilder();
			if (source != null) {
				if (source instanceof DBRef) {
					description.append(((DBRef) source).getCollectionName());
					description.append(":");
					description.append(((DBRef) source).getId());
				} else {
					description.append(source);
				}
			} else {
				description.append(System.identityHashCode(source));
			}
			description.append("$").append(LazyLoadingProxy.class.getSimpleName());

			return description.toString();
		}

		// Equality is defined over the toString() identity so two proxies for the same
		// reference compare equal without resolving either of them.
		private boolean proxyEquals(@Nullable Object proxy, Object that) {

			if (!(that instanceof LazyLoadingProxy)) {
				return false;
			}

			if (that == proxy) {
				return true;
			}

			return proxyToString(proxy).equals(that.toString());
		}

		private int proxyHashCode() {
			return proxyToString(source).hashCode();
		}

		/**
		 * Callback method for serialization. Forces resolution so the resolved value — not the
		 * interceptor state — is what gets written.
		 *
		 * @param out
		 * @throws IOException
		 */
		private void writeObject(ObjectOutputStream out) throws IOException {

			ensureResolved();
			out.writeObject(this.result);
		}

		/**
		 * Callback method for deserialization. Restores the already-resolved value and marks the
		 * proxy resolved so no further lookup happens.
		 *
		 * @param in
		 * @throws IOException
		 */
		private void readObject(ObjectInputStream in) throws IOException {

			try {
				this.resolved = true;
				this.result = in.readObject();
			} catch (ClassNotFoundException e) {
				throw new LazyLoadingException("Could not deserialize result", e);
			}
		}

		// Performs the actual lookup via the callback, translating runtime exceptions into
		// LazyLoadingException (with a dedicated message for invalid session state).
		@Nullable
		private synchronized Object resolve() {

			if (resolved) {

				if (LOGGER.isTraceEnabled()) {
					LOGGER.trace("Accessing already resolved lazy loading property {}.{}",
							property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
				}
				return result;
			}

			try {
				if (LOGGER.isTraceEnabled()) {
					LOGGER.trace("Resolving lazy loading property {}.{}",
							property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
				}

				return callback.resolve(property);

			} catch (RuntimeException ex) {

				DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex);

				if (translatedException instanceof ClientSessionException) {
					throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
				}

				throw new LazyLoadingException("Unable to lazily resolve DBRef!",
						translatedException != null ? translatedException : ex);
			}
		}
	}

}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -26,6 +26,7 @@ import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
@@ -144,9 +145,9 @@ public interface MongoConverter
|
||||
try {
|
||||
return getConversionService().canConvert(id.getClass(), targetType)
|
||||
? getConversionService().convert(id, targetType)
|
||||
: convertToMongoType(id, null);
|
||||
: convertToMongoType(id, (TypeInformation<?>) null);
|
||||
} catch (ConversionException o_O) {
|
||||
return convertToMongoType(id, null);
|
||||
return convertToMongoType(id,(TypeInformation<?>) null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,11 +27,18 @@ import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Currency;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.bson.BsonReader;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonWriter;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.bson.codecs.DecoderContext;
|
||||
import org.bson.codecs.EncoderContext;
|
||||
import org.bson.codecs.configuration.CodecRegistries;
|
||||
import org.bson.types.Binary;
|
||||
import org.bson.types.Code;
|
||||
import org.bson.types.Decimal128;
|
||||
@@ -45,11 +52,12 @@ import org.springframework.data.convert.ReadingConverter;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mongodb.core.query.Term;
|
||||
import org.springframework.data.mongodb.core.script.NamedMongoScript;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.NumberUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.MongoClientSettings;
|
||||
|
||||
/**
|
||||
* Wrapper class to contain useful converters for the usage with Mongo.
|
||||
*
|
||||
@@ -236,9 +244,27 @@ abstract class MongoConverters {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
private final Codec<Document> codec = CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(new Codec<UUID>() {
|
||||
|
||||
@Override
|
||||
public void encode(BsonWriter writer, UUID value, EncoderContext encoderContext) {
|
||||
writer.writeString(value.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<UUID> getEncoderClass() {
|
||||
return UUID.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public UUID decode(BsonReader reader, DecoderContext decoderContext) {
|
||||
throw new IllegalStateException("decode not supported");
|
||||
}
|
||||
}), MongoClientSettings.getDefaultCodecRegistry()).get(Document.class);
|
||||
|
||||
@Override
|
||||
public String convert(Document source) {
|
||||
return source.toJson();
|
||||
return source.toJson(codec);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -268,7 +294,7 @@ abstract class MongoConverters {
|
||||
@Override
|
||||
public NamedMongoScript convert(Document source) {
|
||||
|
||||
if(source.isEmpty()) {
|
||||
if (source.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents}
|
||||
* for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoDatabaseFactoryReferenceLoader.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
MongoCollection<Document> collection = getCollection(context);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Bulk fetching {} from {}.{}.", referenceQuery,
|
||||
StringUtils.hasText(context.getDatabase()) ? context.getDatabase()
|
||||
: collection.getNamespace().getDatabaseName(),
|
||||
context.getCollection());
|
||||
}
|
||||
|
||||
return referenceQuery.apply(collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying
|
||||
* {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}.
|
||||
*/
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
@@ -40,6 +40,7 @@ import org.springframework.data.mongodb.core.query.MongoRegexCreator;
|
||||
import org.springframework.data.mongodb.core.query.MongoRegexCreator.MatchMode;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
import org.springframework.data.mongodb.core.query.UntypedExampleMatcher;
|
||||
import org.springframework.data.mongodb.util.DotPath;
|
||||
import org.springframework.data.support.ExampleMatcherAccessor;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -134,7 +135,7 @@ public class MongoExampleMapper {
|
||||
while (iter.hasNext()) {
|
||||
|
||||
Map.Entry<String, Object> entry = iter.next();
|
||||
String propertyPath = StringUtils.hasText(path) ? path + "." + entry.getKey() : entry.getKey();
|
||||
String propertyPath = DotPath.from(path).append(entry.getKey()).toString();
|
||||
String mappedPropertyPath = getMappedPropertyPath(propertyPath, probeType);
|
||||
|
||||
if (isEmptyIdProperty(entry)) {
|
||||
|
||||
@@ -17,6 +17,9 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.convert.EntityWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -43,7 +46,7 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
*/
|
||||
@Nullable
|
||||
default Object convertToMongoType(@Nullable Object obj) {
|
||||
return convertToMongoType(obj, null);
|
||||
return convertToMongoType(obj, (TypeInformation<?>) null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -57,6 +60,10 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
@Nullable
|
||||
Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation<?> typeInformation);
|
||||
|
||||
default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity<?> entity) {
|
||||
return convertToMongoType(obj, entity.getTypeInformation());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link DBRef} to refer to the given object.
|
||||
*
|
||||
@@ -66,4 +73,17 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty);
|
||||
|
||||
/**
|
||||
* Creates a the {@link DocumentPointer} representing the link to another entity.
|
||||
*
|
||||
* @param source the object to create a document link to.
|
||||
* @param referringProperty the client-side property referring to the object which might carry additional metadata for
|
||||
* the {@link DBRef} object to create. Can be {@literal null}.
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
default DocumentPointer<?> toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
return () -> toDBRef(source, referringProperty);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
@@ -69,4 +70,11 @@ public enum NoOpDbRefResolver implements DbRefResolver {
|
||||
private <T> T handle() throws UnsupportedOperationException {
|
||||
throw new UnsupportedOperationException("DBRef resolution is not supported!");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,13 +19,17 @@ import java.util.*;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
@@ -36,12 +40,14 @@ import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.InvalidPersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty.PropertyToFieldNameConverter;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.DotPath;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -63,9 +69,12 @@ import com.mongodb.DBRef;
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author David Julia
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(QueryMapper.class);
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final Document META_TEXT_SCORE = new Document("$meta", "textScore");
|
||||
static final ClassTypeInformation<?> NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class);
|
||||
@@ -140,9 +149,23 @@ public class QueryMapper {
|
||||
try {
|
||||
|
||||
Field field = createPropertyField(entity, key, mappingContext);
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, BsonUtils.get(query, key));
|
||||
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
// TODO: move to dedicated method
|
||||
if (field.getProperty() != null && field.getProperty().isUnwrapped()) {
|
||||
|
||||
Object theNestedObject = BsonUtils.get(query, key);
|
||||
Document mappedValue = (Document) getMappedValue(field, theNestedObject);
|
||||
if (!StringUtils.hasText(field.getMappedKey())) {
|
||||
result.putAll(mappedValue);
|
||||
} else {
|
||||
result.put(field.getMappedKey(), mappedValue);
|
||||
}
|
||||
} else {
|
||||
|
||||
Entry<String, Object> entry = getMappedObjectForField(field, BsonUtils.get(query, key));
|
||||
|
||||
result.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
} catch (InvalidPersistentPropertyPath invalidPathException) {
|
||||
|
||||
// in case the object has not already been mapped
|
||||
@@ -173,13 +196,7 @@ public class QueryMapper {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
Document mappedSort = new Document();
|
||||
for (Map.Entry<String, Object> entry : BsonUtils.asMap(sortObject).entrySet()) {
|
||||
|
||||
Field field = createPropertyField(entity, entry.getKey(), mappingContext);
|
||||
mappedSort.put(field.getMappedKey(), entry.getValue());
|
||||
}
|
||||
|
||||
Document mappedSort = mapFieldsToPropertyNames(sortObject, entity);
|
||||
mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT);
|
||||
return mappedSort;
|
||||
}
|
||||
@@ -197,11 +214,30 @@ public class QueryMapper {
|
||||
|
||||
Assert.notNull(fieldsObject, "FieldsObject must not be null!");
|
||||
|
||||
Document mappedFields = fieldsObject.isEmpty() ? new Document() : getMappedObject(fieldsObject, entity);
|
||||
Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity);
|
||||
mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE);
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return new Document();
|
||||
|
||||
}
|
||||
Document target = new Document();
|
||||
for (Map.Entry<String, Object> entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) {
|
||||
|
||||
Field field = createPropertyField(entity, entry.getKey(), mappingContext);
|
||||
if (field.getProperty() != null && field.getProperty().isUnwrapped()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
target.put(field.getMappedKey(), entry.getValue());
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity, MetaMapping metaMapping) {
|
||||
|
||||
if (entity == null) {
|
||||
@@ -217,6 +253,44 @@ public class QueryMapper {
|
||||
}
|
||||
}
|
||||
|
||||
private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (fieldsObject.isEmpty() || entity == null) {
|
||||
return fieldsObject;
|
||||
}
|
||||
|
||||
Document target = new Document();
|
||||
|
||||
for (Entry<String, Object> field : fieldsObject.entrySet()) {
|
||||
|
||||
try {
|
||||
|
||||
PropertyPath path = PropertyPath.from(field.getKey(), entity.getTypeInformation());
|
||||
PersistentPropertyPath<MongoPersistentProperty> persistentPropertyPath = mappingContext
|
||||
.getPersistentPropertyPath(path);
|
||||
MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getRequiredLeafProperty();
|
||||
|
||||
if (property.isUnwrapped() && property.isEntity()) {
|
||||
|
||||
MongoPersistentEntity<?> unwrappedEntity = mappingContext.getRequiredPersistentEntity(property);
|
||||
|
||||
for (MongoPersistentProperty unwrappedProperty : unwrappedEntity) {
|
||||
|
||||
DotPath dotPath = DotPath.from(persistentPropertyPath.toDotPath()).append(unwrappedProperty.getName());
|
||||
target.put(dotPath.toString(), field.getValue());
|
||||
}
|
||||
|
||||
} else {
|
||||
target.put(field.getKey(), field.getValue());
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
target.put(field.getKey(), field.getValue());
|
||||
}
|
||||
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
private Document getMappedTextScoreField(MongoPersistentProperty property) {
|
||||
return new Document(property.getFieldName(), META_TEXT_SCORE);
|
||||
}
|
||||
@@ -233,6 +307,10 @@ public class QueryMapper {
|
||||
String key = field.getMappedKey();
|
||||
Object value;
|
||||
|
||||
if (rawValue instanceof MongoExpression) {
|
||||
return createMapEntry(key, getMappedObject(((MongoExpression) rawValue).toDocument(), field.getEntity()));
|
||||
}
|
||||
|
||||
if (isNestedKeyword(rawValue) && !field.isIdField()) {
|
||||
Keyword keyword = new Keyword((Document) rawValue);
|
||||
value = getMappedKeyword(field, keyword);
|
||||
@@ -378,6 +456,10 @@ public class QueryMapper {
|
||||
}
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (isNestedKeyword(value)) {
|
||||
return getMappedKeyword(new Keyword((Bson) value), documentField.getPropertyEntity());
|
||||
}
|
||||
@@ -497,6 +579,11 @@ public class QueryMapper {
|
||||
*/
|
||||
@Nullable
|
||||
protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (entity != null && entity.isUnwrapped()) {
|
||||
return converter.convertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
return converter.convertToMongoType(source, entity == null ? null : entity.getTypeInformation());
|
||||
}
|
||||
|
||||
@@ -534,7 +621,7 @@ public class QueryMapper {
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
result.add(createDbRefFor(element, property));
|
||||
result.add(createReferenceFor(element, property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -543,12 +630,12 @@ public class QueryMapper {
|
||||
Document result = new Document();
|
||||
Document dbObject = (Document) source;
|
||||
for (String key : dbObject.keySet()) {
|
||||
result.put(key, createDbRefFor(dbObject.get(key), property));
|
||||
result.put(key, createReferenceFor(dbObject.get(key), property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return createDbRefFor(source, property);
|
||||
return createReferenceFor(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -595,12 +682,17 @@ public class QueryMapper {
|
||||
return Collections.singletonMap(key, value).entrySet().iterator().next();
|
||||
}
|
||||
|
||||
private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
|
||||
private Object createReferenceFor(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
return (DBRef) source;
|
||||
}
|
||||
|
||||
if (property != null && (property.isDocumentReference()
|
||||
|| (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) {
|
||||
return converter.toDocumentPointer(source, property).getPointer();
|
||||
}
|
||||
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
@@ -635,7 +727,7 @@ public class QueryMapper {
|
||||
* @param candidate
|
||||
* @return
|
||||
*/
|
||||
protected boolean isNestedKeyword(Object candidate) {
|
||||
protected boolean isNestedKeyword(@Nullable Object candidate) {
|
||||
|
||||
if (!(candidate instanceof Document)) {
|
||||
return false;
|
||||
@@ -680,12 +772,13 @@ public class QueryMapper {
|
||||
* converted one by one.
|
||||
*
|
||||
* @param documentField the field and its meta data
|
||||
* @param value the actual value
|
||||
* @param value the actual value. Can be {@literal null}.
|
||||
* @return the potentially converted target value.
|
||||
*/
|
||||
private Object applyFieldTargetTypeHintToValue(Field documentField, Object value) {
|
||||
@Nullable
|
||||
private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) {
|
||||
|
||||
if (documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) {
|
||||
return value;
|
||||
}
|
||||
|
||||
@@ -716,7 +809,6 @@ public class QueryMapper {
|
||||
*/
|
||||
static class Keyword {
|
||||
|
||||
private static final String N_OR_PATTERN = "\\$.*or";
|
||||
private static final Set<String> NON_DBREF_CONVERTING_KEYWORDS = new HashSet<>(
|
||||
Arrays.asList("$", "$size", "$slice", "$gt", "$lt"));
|
||||
|
||||
@@ -747,7 +839,7 @@ public class QueryMapper {
|
||||
}
|
||||
|
||||
public boolean isOrOrNor() {
|
||||
return key.matches(N_OR_PATTERN);
|
||||
return key.equalsIgnoreCase("$or") || key.equalsIgnoreCase("$nor");
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -867,6 +959,11 @@ public class QueryMapper {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
MongoPersistentEntity<?> getEntity() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the field represents an association.
|
||||
*
|
||||
@@ -912,6 +1009,7 @@ public class QueryMapper {
|
||||
public TypeInformation<?> getTypeHint() {
|
||||
return ClassTypeInformation.OBJECT;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1018,6 +1116,12 @@ public class QueryMapper {
|
||||
return property == null ? null : mappingContext.getPersistentEntity(property);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public MongoPersistentEntity<?> getEntity() {
|
||||
return entity;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.convert.QueryMapper.Field#isAssociation()
|
||||
@@ -1086,8 +1190,8 @@ public class QueryMapper {
|
||||
removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression));
|
||||
|
||||
if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) {
|
||||
return mappingContext
|
||||
.getPersistentPropertyPath(PropertyPath.from(sourceProperty.getName(), entity.getTypeInformation()));
|
||||
return mappingContext.getPersistentPropertyPath(
|
||||
PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation()));
|
||||
}
|
||||
|
||||
PropertyPath path = forName(rawPath);
|
||||
@@ -1095,29 +1199,47 @@ public class QueryMapper {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = tryToResolvePersistentPropertyPath(path);
|
||||
|
||||
PersistentPropertyPath<MongoPersistentProperty> propertyPath = mappingContext.getPersistentPropertyPath(path);
|
||||
if (propertyPath == null) {
|
||||
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
boolean associationDetected = false;
|
||||
if (QueryMapper.LOGGER.isInfoEnabled()) {
|
||||
|
||||
while (iterator.hasNext()) {
|
||||
String types = StringUtils.collectionToDelimitedString(
|
||||
path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> ");
|
||||
QueryMapper.LOGGER.info(
|
||||
"Could not map '{}'. Maybe a fragment in '{}' is considered a simple type. Mapper continues with {}.",
|
||||
path, types, pathExpression);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
MongoPersistentProperty property = iterator.next();
|
||||
Iterator<MongoPersistentProperty> iterator = propertyPath.iterator();
|
||||
boolean associationDetected = false;
|
||||
|
||||
if (property.isAssociation()) {
|
||||
associationDetected = true;
|
||||
continue;
|
||||
}
|
||||
while (iterator.hasNext()) {
|
||||
|
||||
if (associationDetected && !property.isIdProperty()) {
|
||||
throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
|
||||
}
|
||||
MongoPersistentProperty property = iterator.next();
|
||||
|
||||
if (property.isAssociation()) {
|
||||
associationDetected = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
return propertyPath;
|
||||
} catch (InvalidPersistentPropertyPath e) {
|
||||
if (associationDetected && !property.isIdProperty()) {
|
||||
throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression));
|
||||
}
|
||||
}
|
||||
|
||||
return propertyPath;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private PersistentPropertyPath<MongoPersistentProperty> tryToResolvePersistentPropertyPath(PropertyPath path) {
|
||||
|
||||
try {
|
||||
return mappingContext.getPersistentPropertyPath(path);
|
||||
} catch (MappingException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -1146,13 +1268,21 @@ public class QueryMapper {
|
||||
return forName(path.substring(0, path.length() - 3) + "id");
|
||||
}
|
||||
|
||||
// Ok give it another try quoting
|
||||
try {
|
||||
return PropertyPath.from(Pattern.quote(path), entity.getTypeInformation());
|
||||
} catch (PropertyReferenceException | InvalidPersistentPropertyPath ex) {
|
||||
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isPathToJavaLangClassProperty(PropertyPath path) {
|
||||
|
||||
if (path.getType().equals(Class.class) && path.getLeafProperty().getOwningType().getType().equals(Class.class)) {
|
||||
if ((path.getType() == Class.class || path.getType().equals(Object.class))
|
||||
&& path.getLeafProperty().getType() == Class.class) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -1238,12 +1368,17 @@ public class QueryMapper {
|
||||
static class KeyMapper {
|
||||
|
||||
private final Iterator<String> iterator;
|
||||
private int currentIndex;
|
||||
private String currentPropertyRoot;
|
||||
private final List<String> pathParts;
|
||||
|
||||
public KeyMapper(String key,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.iterator = Arrays.asList(key.split("\\.")).iterator();
|
||||
this.iterator.next();
|
||||
this.pathParts = Arrays.asList(key.split("\\."));
|
||||
this.iterator = pathParts.iterator();
|
||||
this.currentPropertyRoot = iterator.next();
|
||||
this.currentIndex = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1255,21 +1390,31 @@ public class QueryMapper {
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));
|
||||
|
||||
boolean inspect = iterator.hasNext();
|
||||
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
currentIndex++;
|
||||
|
||||
boolean isPositional = (isPositionalParameter(partial) && (property.isMap() || property.isCollectionLike()));
|
||||
boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike() ;
|
||||
if(property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()){
|
||||
partial = iterator.next();
|
||||
currentIndex++;
|
||||
}
|
||||
|
||||
if (isPositional) {
|
||||
if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) {
|
||||
mappedName.append(".").append(partial);
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
if(currentIndex + 1 < pathParts.size()) {
|
||||
currentIndex++;
|
||||
currentPropertyRoot = pathParts.get(currentIndex);
|
||||
}
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a
|
||||
* {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ReferenceLoader {
|
||||
|
||||
/**
|
||||
* Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
@Nullable
|
||||
default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
Iterator<Document> it = fetchMany(referenceQuery, context).iterator();
|
||||
return it.hasNext() ? it.next() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain multiple {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context);
|
||||
|
||||
/**
|
||||
* The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched
|
||||
* applying potentially given order criteria.
|
||||
*/
|
||||
interface DocumentReferenceQuery {
|
||||
|
||||
/**
|
||||
* Get the query to obtain matching {@link Document documents}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Bson getQuery();
|
||||
|
||||
/**
|
||||
* Get the sort criteria for ordering results.
|
||||
*
|
||||
* @return an empty {@link Document} by default. Never {@literal null}.
|
||||
*/
|
||||
default Bson getSort() {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
default Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return restoreOrder(collection.find(getQuery()).sort(getSort()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore the order of fetched documents.
|
||||
*
|
||||
* @param documents must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
default Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forSingleDocument(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forManyDocuments(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return collection.find(getQuery()).sort(getSort());
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,451 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingContext;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
|
||||
import org.springframework.data.mongodb.util.json.ValueProvider;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* A common delegate for {@link ReferenceResolver} implementations to resolve a reference to one/many target documents
|
||||
* that are converted to entities.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public final class ReferenceLookupDelegate {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final SpELContext spELContext;
|
||||
private final ParameterBindingDocumentCodec codec;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReferenceLookupDelegate}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param spELContext must not be {@literal null}.
|
||||
*/
|
||||
public ReferenceLookupDelegate(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
SpELContext spELContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null");
|
||||
Assert.notNull(spELContext, "SpELContext must not be null");
|
||||
|
||||
this.mappingContext = mappingContext;
|
||||
this.spELContext = spELContext;
|
||||
this.codec = new ParameterBindingDocumentCodec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the reference expressed by the given property.
|
||||
*
|
||||
* @param property the reference defining property. Must not be {@literal null}. THe
|
||||
* @param value the source value identifying to the referenced entity. Must not be {@literal null}.
|
||||
* @param lookupFunction to execute a lookup query. Must not be {@literal null}.
|
||||
* @param entityReader the callback to convert raw source values into actual domain types. Must not be
|
||||
* {@literal null}.
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction,
|
||||
MongoEntityReader entityReader) {
|
||||
|
||||
DocumentReferenceQuery filter = computeFilter(property, value, spELContext);
|
||||
ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext);
|
||||
|
||||
Iterable<Document> result = lookupFunction.apply(filter, referenceCollection);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return entityReader.read(result, property.getTypeInformation());
|
||||
}
|
||||
|
||||
if (!result.iterator().hasNext()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Object resultValue = result.iterator().next();
|
||||
return resultValue != null ? entityReader.read(resultValue, property.getTypeInformation()) : null;
|
||||
}
|
||||
|
||||
private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value,
|
||||
SpELContext spELContext) {
|
||||
|
||||
// Use the first value as a reference for others in case of collection like
|
||||
if (value instanceof Iterable) {
|
||||
value = ((Iterable<?>) value).iterator().next();
|
||||
}
|
||||
|
||||
// handle DBRef value
|
||||
if (value instanceof DBRef) {
|
||||
return ReferenceCollection.fromDBRef((DBRef) value);
|
||||
}
|
||||
|
||||
String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection();
|
||||
|
||||
if (value instanceof Document) {
|
||||
|
||||
Document documentPointer = (Document) value;
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
|
||||
ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
|
||||
DocumentReference documentReference = property.getDocumentReference();
|
||||
|
||||
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext,
|
||||
() -> documentPointer.get("db", String.class));
|
||||
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext,
|
||||
() -> documentPointer.get("collection", collection));
|
||||
return new ReferenceCollection(targetDatabase, targetCollection);
|
||||
}
|
||||
|
||||
return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection));
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
|
||||
ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
|
||||
DocumentReference documentReference = property.getDocumentReference();
|
||||
|
||||
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null);
|
||||
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection);
|
||||
|
||||
return new ReferenceCollection(targetDatabase, targetCollection);
|
||||
}
|
||||
|
||||
return new ReferenceCollection(null, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the given {@link ParameterBindingContext} to compute potential expressions against the value.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param bindingContext must not be {@literal null}.
|
||||
* @param defaultValue
|
||||
* @param <T>
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier<T> defaultValue) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return defaultValue.get();
|
||||
}
|
||||
|
||||
// parameter binding requires a document, since we do not have one, construct it.
|
||||
if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) {
|
||||
String s = "{ 'target-value' : " + value + "}";
|
||||
T evaluated = (T) codec.decode(s, bindingContext).get("target-value");
|
||||
return evaluated != null ? evaluated : defaultValue.get();
|
||||
}
|
||||
|
||||
if (BsonUtils.isJsonDocument(value)) {
|
||||
return (T) codec.decode(value, bindingContext);
|
||||
}
|
||||
|
||||
T evaluated = (T) bindingContext.evaluateExpression(value);
|
||||
return evaluated != null ? evaluated : defaultValue.get();
|
||||
}
|
||||
|
||||
ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
return new ParameterBindingContext(valueProviderFor(source), spELContext.getParser(),
|
||||
() -> evaluationContextFor(property, source, spELContext));
|
||||
}
|
||||
|
||||
ValueProvider valueProviderFor(Object source) {
|
||||
|
||||
return (index) -> {
|
||||
if (source instanceof Document) {
|
||||
return Streamable.of(((Document) source).values()).toList().get(index);
|
||||
}
|
||||
return source;
|
||||
};
|
||||
}
|
||||
|
||||
EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
EvaluationContext ctx = spELContext.getEvaluationContext(source);
|
||||
ctx.setVariable("target", source);
|
||||
ctx.setVariable(property.getName(), source);
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the query to retrieve linked documents.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param value must not be {@literal null}.
|
||||
* @param spELContext must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) {
|
||||
|
||||
DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference()
|
||||
: ReferenceEmulatingDocumentReference.INSTANCE;
|
||||
|
||||
String lookup = documentReference.lookup();
|
||||
|
||||
Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext),
|
||||
() -> new Document());
|
||||
|
||||
if (property.isCollectionLike() && value instanceof Collection) {
|
||||
|
||||
List<Document> ors = new ArrayList<>();
|
||||
for (Object entry : (Collection<Object>) value) {
|
||||
|
||||
Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext));
|
||||
ors.add(decoded);
|
||||
}
|
||||
|
||||
return new ListDocumentReferenceQuery(new Document("$or", ors), sort);
|
||||
}
|
||||
|
||||
if (property.isMap() && value instanceof Map) {
|
||||
|
||||
Map<Object, Document> filterMap = new LinkedHashMap<>();
|
||||
|
||||
for (Entry<Object, Object> entry : ((Map<Object, Object>) value).entrySet()) {
|
||||
|
||||
Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext));
|
||||
filterMap.put(entry.getKey(), decoded);
|
||||
}
|
||||
|
||||
return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap);
|
||||
}
|
||||
|
||||
return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort);
|
||||
}
|
||||
|
||||
enum ReferenceEmulatingDocumentReference implements DocumentReference {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Class<? extends Annotation> annotationType() {
|
||||
return DocumentReference.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String db() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String collection() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String lookup() {
|
||||
return "{ '_id' : ?#{#target} }";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String sort() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean lazy() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation fetching a single {@link Document}.
|
||||
*/
|
||||
static class SingleDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public SingleDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
|
||||
* {@link Map} structure. Restores the original map order by matching individual query documents against the actual
|
||||
* values.
|
||||
*/
|
||||
static class MapDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
private final Map<Object, Document> filterOrderMap;
|
||||
|
||||
public MapDocumentReferenceQuery(Document query, Document sort, Map<Object, Document> filterOrderMap) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
this.filterOrderMap = filterOrderMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
|
||||
Map<String, Object> targetMap = new LinkedHashMap<>();
|
||||
List<Document> collected = documents instanceof List ? (List<Document>) documents
|
||||
: Streamable.of(documents).toList();
|
||||
|
||||
for (Entry<Object, Document> filterMapping : filterOrderMap.entrySet()) {
|
||||
|
||||
Optional<Document> first = collected.stream()
|
||||
.filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst();
|
||||
|
||||
targetMap.put(filterMapping.getKey().toString(), first.orElse(null));
|
||||
}
|
||||
return Collections.singleton(new Document(targetMap));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
|
||||
* {@link Collection} like structure. Restores the original order by matching individual query documents against the
|
||||
* actual values.
|
||||
*/
|
||||
static class ListDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public ListDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
|
||||
List<Document> target = documents instanceof List ? (List<Document>) documents
|
||||
: Streamable.of(documents).toList();
|
||||
|
||||
if (!sort.isEmpty() || !query.containsKey("$or")) {
|
||||
return target;
|
||||
}
|
||||
|
||||
List<Document> ors = query.get("$or", List.class);
|
||||
return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public Document getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
int compareAgainstReferenceIndex(List<Document> referenceList, Document document1, Document document2) {
|
||||
|
||||
for (Document document : referenceList) {
|
||||
|
||||
Set<Entry<String, Object>> entries = document.entrySet();
|
||||
if (document1.entrySet().containsAll(entries)) {
|
||||
return -1;
|
||||
}
|
||||
if (document2.entrySet().containsAll(entries)) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
return referenceList.size();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to
|
||||
* obtain raw results.
|
||||
*/
|
||||
@FunctionalInterface
|
||||
interface LookupFunction {
|
||||
|
||||
/**
|
||||
* @param referenceQuery never {@literal null}.
|
||||
* @param referenceCollection never {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Iterable<Document> apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* The {@link ReferenceResolver} allows to load and convert linked entities.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface ReferenceResolver {
|
||||
|
||||
/**
|
||||
* Resolve the association defined via the given property from a given source value. May return a
|
||||
* {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. The resolved value is assignable to
|
||||
* {@link PersistentProperty#getType()}.
|
||||
*
|
||||
* @param property the association defining property.
|
||||
* @param source the association source value.
|
||||
* @param referenceLookupDelegate the lookup executing component.
|
||||
* @param entityReader conversion function capable of constructing entities from raw source.
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader);
|
||||
|
||||
/**
|
||||
* {@link ReferenceCollection} is a value object that contains information about the target database and collection
|
||||
* name of an association.
|
||||
*/
|
||||
class ReferenceCollection {
|
||||
|
||||
@Nullable //
|
||||
private final String database;
|
||||
private final String collection;
|
||||
|
||||
/**
|
||||
* @param database can be {@literal null} to indicate the configured default
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase() database} should be used.
|
||||
* @param collection the target collection name. Must not be {@literal null}.
|
||||
*/
|
||||
public ReferenceCollection(@Nullable String database, String collection) {
|
||||
|
||||
Assert.hasText(collection, "Collection must not be empty or null!");
|
||||
|
||||
this.database = database;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}.
|
||||
*
|
||||
* @param dbRef must not be {@literal null}.
|
||||
* @return new instance of {@link ReferenceCollection}.
|
||||
*/
|
||||
public static ReferenceCollection fromDBRef(DBRef dbRef) {
|
||||
return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the target collection name.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getCollection() {
|
||||
return collection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the target database name. If {@literal null} the default database should be used.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getDatabase() {
|
||||
return database;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Domain type conversion callback interface that allows to read the {@code source} object into a mapped object.
|
||||
*/
|
||||
@FunctionalInterface
|
||||
interface MongoEntityReader {
|
||||
|
||||
/**
|
||||
* Read values from the given source into an object defined via the given {@link TypeInformation}.
|
||||
*
|
||||
* @param source never {@literal null}.
|
||||
* @param typeInformation information about the desired target type.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Object read(Object source, TypeInformation<?> typeInformation);
|
||||
}
|
||||
}
|
||||
@@ -132,6 +132,11 @@ public class UpdateMapper extends QueryMapper {
|
||||
*/
|
||||
@Override
|
||||
protected Object delegateConvertToMongoType(Object source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
if(entity != null && entity.isUnwrapped()) {
|
||||
return converter.convertToMongoType(source, entity);
|
||||
}
|
||||
|
||||
return converter.convertToMongoType(source,
|
||||
entity == null ? ClassTypeInformation.OBJECT : getTypeHintForEntity(source, entity));
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
/**
|
||||
* Interface definition for structures defined in <a href="https://geojson.org/>GeoJSON</a> format.
|
||||
* Interface definition for structures defined in <a href="https://geojson.org/">GeoJSON</a> format.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
|
||||
@@ -24,7 +24,7 @@ import org.springframework.data.geo.Point;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.Version;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonDeserializer;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
@@ -34,7 +34,10 @@ import com.fasterxml.jackson.databind.module.SimpleModule;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
|
||||
/**
|
||||
* A Jackson {@link Module} to register custom {@link JsonSerializer} and {@link JsonDeserializer}s for GeoJSON types.
|
||||
* A Jackson {@link Module} to register custom {@link JsonDeserializer}s for GeoJSON types.
|
||||
* <p />
|
||||
* Use {@link #geoJsonModule()} to obtain a {@link Module} containing both {@link JsonSerializer serializers} and
|
||||
* {@link JsonDeserializer deserializers}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
@@ -47,12 +50,87 @@ public class GeoJsonModule extends SimpleModule {
|
||||
|
||||
public GeoJsonModule() {
|
||||
|
||||
addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer());
|
||||
addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer());
|
||||
addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer());
|
||||
addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer());
|
||||
addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer());
|
||||
addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer());
|
||||
registerDeserializersIn(this);
|
||||
// TODO: add serializers as of next major version (4.0).
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link Module} containing {@link JsonDeserializer deserializers} for the following {@link GeoJson} types:
|
||||
* <ul>
|
||||
* <li>{@link GeoJsonPoint}</li>
|
||||
* <li>{@link GeoJsonMultiPoint}</li>
|
||||
* <li>{@link GeoJsonLineString}</li>
|
||||
* <li>{@link GeoJsonMultiLineString}</li>
|
||||
* <li>{@link GeoJsonPolygon}</li>
|
||||
* <li>{@link GeoJsonMultiPolygon}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @return a {@link Module} containing {@link JsonDeserializer deserializers} for {@link GeoJson} types.
|
||||
* @since 3.2
|
||||
*/
|
||||
public static Module deserializers() {
|
||||
|
||||
SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Deserializers",
|
||||
new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson"));
|
||||
registerDeserializersIn(module);
|
||||
return module;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link Module} containing {@link JsonSerializer serializers} for the following {@link GeoJson} types:
|
||||
* <ul>
|
||||
* <li>{@link GeoJsonPoint}</li>
|
||||
* <li>{@link GeoJsonMultiPoint}</li>
|
||||
* <li>{@link GeoJsonLineString}</li>
|
||||
* <li>{@link GeoJsonMultiLineString}</li>
|
||||
* <li>{@link GeoJsonPolygon}</li>
|
||||
* <li>{@link GeoJsonMultiPolygon}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @return a {@link Module} containing {@link JsonSerializer serializers} for {@link GeoJson} types.
|
||||
* @since 3.2
|
||||
*/
|
||||
public static Module serializers() {
|
||||
|
||||
SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson - Serializers",
|
||||
new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson"));
|
||||
GeoJsonSerializersModule.registerSerializersIn(module);
|
||||
return module;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers}
|
||||
* for the following {@link GeoJson} types:
|
||||
* <ul>
|
||||
* <li>{@link GeoJsonPoint}</li>
|
||||
* <li>{@link GeoJsonMultiPoint}</li>
|
||||
* <li>{@link GeoJsonLineString}</li>
|
||||
* <li>{@link GeoJsonMultiLineString}</li>
|
||||
* <li>{@link GeoJsonPolygon}</li>
|
||||
* <li>{@link GeoJsonMultiPolygon}</li>
|
||||
* </ul>
|
||||
*
|
||||
* @return a {@link Module} containing {@link JsonSerializer serializers} and {@link JsonDeserializer deserializers}
|
||||
* for {@link GeoJson} types.
|
||||
* @since 3.2
|
||||
*/
|
||||
public static Module geoJsonModule() {
|
||||
|
||||
SimpleModule module = new SimpleModule("Spring Data MongoDB GeoJson",
|
||||
new Version(3, 2, 0, null, "org.springframework.data", "spring-data-mongodb-geojson"));
|
||||
GeoJsonSerializersModule.registerSerializersIn(module);
|
||||
registerDeserializersIn(module);
|
||||
return module;
|
||||
}
|
||||
|
||||
private static void registerDeserializersIn(SimpleModule module) {
|
||||
|
||||
module.addDeserializer(GeoJsonPoint.class, new GeoJsonPointDeserializer());
|
||||
module.addDeserializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointDeserializer());
|
||||
module.addDeserializer(GeoJsonLineString.class, new GeoJsonLineStringDeserializer());
|
||||
module.addDeserializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringDeserializer());
|
||||
module.addDeserializer(GeoJsonPolygon.class, new GeoJsonPolygonDeserializer());
|
||||
module.addDeserializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonDeserializer());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -67,8 +145,7 @@ public class GeoJsonModule extends SimpleModule {
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public T deserialize(@Nullable JsonParser jp, @Nullable DeserializationContext ctxt)
|
||||
throws IOException, JsonProcessingException {
|
||||
public T deserialize(@Nullable JsonParser jp, @Nullable DeserializationContext ctxt) throws IOException {
|
||||
|
||||
JsonNode node = jp.readValueAsTree();
|
||||
JsonNode coordinates = node.get("coordinates");
|
||||
@@ -134,7 +211,7 @@ public class GeoJsonModule extends SimpleModule {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
List<Point> points = new ArrayList<Point>(node.size());
|
||||
List<Point> points = new ArrayList<>(node.size());
|
||||
|
||||
for (JsonNode coordinatePair : node) {
|
||||
if (coordinatePair.isArray()) {
|
||||
@@ -145,7 +222,7 @@ public class GeoJsonModule extends SimpleModule {
|
||||
}
|
||||
|
||||
protected GeoJsonLineString toLineString(ArrayNode node) {
|
||||
return new GeoJsonLineString(toPoints((ArrayNode) node));
|
||||
return new GeoJsonLineString(toPoints(node));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -259,7 +336,7 @@ public class GeoJsonModule extends SimpleModule {
|
||||
@Override
|
||||
protected GeoJsonMultiLineString doDeserialize(ArrayNode coordinates) {
|
||||
|
||||
List<GeoJsonLineString> lines = new ArrayList<GeoJsonLineString>(coordinates.size());
|
||||
List<GeoJsonLineString> lines = new ArrayList<>(coordinates.size());
|
||||
|
||||
for (JsonNode lineString : coordinates) {
|
||||
if (lineString.isArray()) {
|
||||
@@ -336,7 +413,7 @@ public class GeoJsonModule extends SimpleModule {
|
||||
@Override
|
||||
protected GeoJsonMultiPolygon doDeserialize(ArrayNode coordinates) {
|
||||
|
||||
List<GeoJsonPolygon> polygones = new ArrayList<GeoJsonPolygon>(coordinates.size());
|
||||
List<GeoJsonPolygon> polygones = new ArrayList<>(coordinates.size());
|
||||
|
||||
for (JsonNode polygon : coordinates) {
|
||||
for (JsonNode ring : polygon) {
|
||||
|
||||
@@ -0,0 +1,313 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.geo;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.data.geo.Point;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||
import com.fasterxml.jackson.databind.Module;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.module.SimpleModule;
|
||||
|
||||
/**
|
||||
* A Jackson {@link Module} to register custom {@link JsonSerializer}s for GeoJSON types.
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
class GeoJsonSerializersModule extends SimpleModule {
|
||||
|
||||
private static final long serialVersionUID = 1340494654898895610L;
|
||||
|
||||
GeoJsonSerializersModule() {
|
||||
registerSerializersIn(this);
|
||||
}
|
||||
|
||||
|
||||
static void registerSerializersIn(SimpleModule module) {
|
||||
|
||||
module.addSerializer(GeoJsonPoint.class, new GeoJsonPointSerializer());
|
||||
module.addSerializer(GeoJsonMultiPoint.class, new GeoJsonMultiPointSerializer());
|
||||
module.addSerializer(GeoJsonLineString.class, new GeoJsonLineStringSerializer());
|
||||
module.addSerializer(GeoJsonMultiLineString.class, new GeoJsonMultiLineStringSerializer());
|
||||
module.addSerializer(GeoJsonPolygon.class, new GeoJsonPolygonSerializer());
|
||||
module.addSerializer(GeoJsonMultiPolygon.class, new GeoJsonMultiPolygonSerializer());
|
||||
}
|
||||
|
||||
/**
|
||||
* @param <T>
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static abstract class GeoJsonSerializer<T extends GeoJson<? extends Iterable>> extends JsonSerializer<T> {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.fasterxml.jackson.databind.JsonSerializer#serialize(java.lang.Object, com.fasterxml.jackson.core.JsonGenerator, com.fasterxml.jackson.databind.SerializerProvider)
|
||||
*/
|
||||
@Override
|
||||
public void serialize(T shape, JsonGenerator jsonGenerator, SerializerProvider serializers) throws IOException {
|
||||
|
||||
jsonGenerator.writeStartObject();
|
||||
jsonGenerator.writeStringField("type", shape.getType());
|
||||
jsonGenerator.writeArrayFieldStart("coordinates");
|
||||
|
||||
doSerialize(shape, jsonGenerator);
|
||||
|
||||
jsonGenerator.writeEndArray();
|
||||
jsonGenerator.writeEndObject();
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform the actual serialization given the {@literal shape} as {@link GeoJson}.
|
||||
*
|
||||
* @param shape
|
||||
* @param jsonGenerator
|
||||
* @return
|
||||
*/
|
||||
protected abstract void doSerialize(T shape, JsonGenerator jsonGenerator) throws IOException;
|
||||
|
||||
/**
|
||||
* Write a {@link Point} as array. <br />
|
||||
* {@code [10.0, 20.0]}
|
||||
*
|
||||
* @param point
|
||||
* @param jsonGenerator
|
||||
* @throws IOException
|
||||
*/
|
||||
protected void writePoint(Point point, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
jsonGenerator.writeStartArray();
|
||||
writeRawCoordinates(point, jsonGenerator);
|
||||
jsonGenerator.writeEndArray();
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the {@link Point} coordinates. <br />
|
||||
* {@code 10.0, 20.0}
|
||||
*
|
||||
* @param point
|
||||
* @param jsonGenerator
|
||||
* @throws IOException
|
||||
*/
|
||||
protected void writeRawCoordinates(Point point, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
jsonGenerator.writeNumber(point.getX());
|
||||
jsonGenerator.writeNumber(point.getY());
|
||||
}
|
||||
|
||||
/**
|
||||
* Write an {@link Iterable} of {@link Point} as array. <br />
|
||||
* {@code [ [10.0, 20.0], [30.0, 40.0], [50.0, 60.0] ]}
|
||||
*
|
||||
* @param points
|
||||
* @param jsonGenerator
|
||||
* @throws IOException
|
||||
*/
|
||||
protected void writeLine(Iterable<Point> points, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
jsonGenerator.writeStartArray();
|
||||
writeRawLine(points, jsonGenerator);
|
||||
jsonGenerator.writeEndArray();
|
||||
}
|
||||
|
||||
/**
|
||||
* Write an {@link Iterable} of {@link Point}. <br />
|
||||
* {@code [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]}
|
||||
*
|
||||
* @param points
|
||||
* @param jsonGenerator
|
||||
* @throws IOException
|
||||
*/
|
||||
protected void writeRawLine(Iterable<Point> points, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
for (Point point : points) {
|
||||
writePoint(point, jsonGenerator);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonPoint} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* { "type": "Point", "coordinates": [10.0, 20.0] }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonPointSerializer extends GeoJsonSerializer<GeoJsonPoint> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonPoint value, JsonGenerator jsonGenerator) throws IOException {
|
||||
writeRawCoordinates(value, jsonGenerator);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonLineString} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* "type": "LineString",
|
||||
* "coordinates": [
|
||||
* [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
|
||||
* ]
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonLineStringSerializer extends GeoJsonSerializer<GeoJsonLineString> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonLineString value, JsonGenerator jsonGenerator) throws IOException {
|
||||
writeRawLine(value.getCoordinates(), jsonGenerator);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonMultiPoint} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* "type": "MultiPoint",
|
||||
* "coordinates": [
|
||||
* [10.0, 20.0], [30.0, 40.0], [50.0, 60.0]
|
||||
* ]
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonMultiPointSerializer extends GeoJsonSerializer<GeoJsonMultiPoint> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonMultiPoint value, JsonGenerator jsonGenerator) throws IOException {
|
||||
writeRawLine(value.getCoordinates(), jsonGenerator);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonMultiLineString} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* "type": "MultiLineString",
|
||||
* "coordinates": [
|
||||
* [ [10.0, 20.0], [30.0, 40.0] ],
|
||||
* [ [50.0, 60.0] , [70.0, 80.0] ]
|
||||
* ]
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonMultiLineStringSerializer extends GeoJsonSerializer<GeoJsonMultiLineString> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonMultiLineString value, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
for (GeoJsonLineString lineString : value.getCoordinates()) {
|
||||
writeLine(lineString.getCoordinates(), jsonGenerator);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonPolygon} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* "type": "Polygon",
|
||||
* "coordinates": [
|
||||
* [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
|
||||
* ]
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonPolygonSerializer extends GeoJsonSerializer<GeoJsonPolygon> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonPolygon value, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
for (GeoJsonLineString lineString : value.getCoordinates()) {
|
||||
writeLine(lineString.getCoordinates(), jsonGenerator);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link JsonSerializer} converting {@link GeoJsonMultiPolygon} to:
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* {
|
||||
* "type": "MultiPolygon",
|
||||
* "coordinates": [
|
||||
* [[[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]]],
|
||||
* [[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
|
||||
* [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]]]
|
||||
* ]
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @author Bjorn Harvold
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
*/
|
||||
static class GeoJsonMultiPolygonSerializer extends GeoJsonSerializer<GeoJsonMultiPolygon> {
|
||||
|
||||
@Override
|
||||
protected void doSerialize(GeoJsonMultiPolygon value, JsonGenerator jsonGenerator) throws IOException {
|
||||
|
||||
for (GeoJsonPolygon polygon : value.getCoordinates()) {
|
||||
|
||||
jsonGenerator.writeStartArray();
|
||||
for (GeoJsonLineString lineString : polygon.getCoordinates()) {
|
||||
writeLine(lineString.getCoordinates(), jsonGenerator);
|
||||
}
|
||||
jsonGenerator.writeEndArray();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -29,7 +29,17 @@ import org.springframework.util.ObjectUtils;
|
||||
public final class IndexField {
|
||||
|
||||
enum Type {
|
||||
GEO, TEXT, DEFAULT, HASH;
|
||||
GEO, TEXT, DEFAULT,
|
||||
|
||||
/**
|
||||
* @since 2.2
|
||||
*/
|
||||
HASH,
|
||||
|
||||
/**
|
||||
* @since 3.3
|
||||
*/
|
||||
WILDCARD;
|
||||
}
|
||||
|
||||
private final String key;
|
||||
@@ -48,7 +58,7 @@ public final class IndexField {
|
||||
if (Type.GEO.equals(type) || Type.TEXT.equals(type)) {
|
||||
Assert.isNull(direction, "Geo/Text indexes must not have a direction!");
|
||||
} else {
|
||||
if (!Type.HASH.equals(type)) {
|
||||
if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type))) {
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
}
|
||||
}
|
||||
@@ -77,6 +87,18 @@ public final class IndexField {
|
||||
return new IndexField(key, null, Type.HASH);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the
|
||||
* {@code fieldName.$**} notation.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @return new instance of {@link IndexField}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static IndexField wildcard(String key) {
|
||||
return new IndexField(key, null, Type.WILDCARD);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geo {@link IndexField} for the given key.
|
||||
*
|
||||
@@ -142,6 +164,16 @@ public final class IndexField {
|
||||
return Type.HASH.equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is contains a {@literal wildcard} expression.
|
||||
*
|
||||
* @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public boolean isWildcard() {
|
||||
return Type.WILDCARD.equals(type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
|
||||
@@ -55,6 +55,7 @@ public class IndexInfo {
|
||||
private @Nullable Duration expireAfter;
|
||||
private @Nullable String partialFilterExpression;
|
||||
private @Nullable Document collation;
|
||||
private @Nullable Document wildcardProjection;
|
||||
|
||||
public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean sparse, String language) {
|
||||
|
||||
@@ -99,6 +100,8 @@ public class IndexInfo {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals("hashed", value)) {
|
||||
indexFields.add(IndexField.hashed(key));
|
||||
} else if (key.endsWith("$**")) {
|
||||
indexFields.add(IndexField.wildcard(key));
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
@@ -131,6 +134,10 @@ public class IndexInfo {
|
||||
info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class));
|
||||
}
|
||||
|
||||
if (sourceDocument.containsKey("wildcardProjection")) {
|
||||
info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class);
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
@@ -216,6 +223,16 @@ public class IndexInfo {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get {@literal wildcardProjection} information.
|
||||
*
|
||||
* @return {@link Optional#empty() empty} if not set.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<Document> getWildcardProjection() {
|
||||
return Optional.ofNullable(wildcardProjection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the duration after which documents within the index expire.
|
||||
*
|
||||
@@ -234,6 +251,14 @@ public class IndexInfo {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isHashed);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a wildcard index field is present.
|
||||
* @since 3.3
|
||||
*/
|
||||
public boolean isWildcard() {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isWildcard);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
@@ -303,4 +328,5 @@ public class IndexInfo {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Provider interface to obtain {@link IndexOperations} by MongoDB collection name.
|
||||
*
|
||||
@@ -26,10 +28,22 @@ package org.springframework.data.mongodb.core.index;
|
||||
public interface IndexOperationsProvider {
|
||||
|
||||
/**
|
||||
* Returns the operations that can be performed on indexes
|
||||
* Returns the operations that can be performed on indexes.
|
||||
*
|
||||
* @param collectionName name of the MongoDB collection, must not be {@literal null}.
|
||||
* @return index operations on the named collection
|
||||
*/
|
||||
IndexOperations indexOps(String collectionName);
|
||||
default IndexOperations indexOps(String collectionName) {
|
||||
return indexOps(collectionName, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the operations that can be performed on indexes.
|
||||
*
|
||||
* @param collectionName name of the MongoDB collection, must not be {@literal null}.
|
||||
* @param type the type used for field mapping. Can be {@literal null}.
|
||||
* @return index operations on the named collection
|
||||
* @since 3.2
|
||||
*/
|
||||
IndexOperations indexOps(String collectionName, @Nullable Class<?> type);
|
||||
}
|
||||
|
||||
@@ -46,7 +46,9 @@ import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.DotPath;
|
||||
import org.springframework.data.spel.EvaluationContextProvider;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
@@ -117,9 +119,12 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
Assert.notNull(document, () -> String
|
||||
.format("Entity %s is not a collection root. Make sure to annotate it with @Document!", root.getName()));
|
||||
|
||||
verifyWildcardIndexedProjection(root);
|
||||
|
||||
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
String collection = root.getCollection();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root));
|
||||
indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root));
|
||||
indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection));
|
||||
|
||||
root.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
|
||||
@@ -130,13 +135,32 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
private void verifyWildcardIndexedProjection(MongoPersistentEntity<?> entity) {
|
||||
|
||||
entity.doWithAll(it -> {
|
||||
|
||||
if (it.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
|
||||
WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class);
|
||||
|
||||
if (!ObjectUtils.isEmpty(indexed.wildcardProjection())) {
|
||||
|
||||
throw new MappingException(String.format(
|
||||
"WildcardIndexed.wildcardProjection cannot be used on nested paths. Offending property: %s.%s",
|
||||
entity.getName(), it.getName()));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void potentiallyAddIndexForProperty(MongoPersistentEntity<?> root, MongoPersistentProperty persistentProperty,
|
||||
List<IndexDefinitionHolder> indexes, CycleGuard guard) {
|
||||
|
||||
try {
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(),
|
||||
persistentProperty.getFieldName(), Path.of(persistentProperty), root.getCollection(), guard));
|
||||
indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty),
|
||||
persistentProperty.isUnwrapped() ? "" : persistentProperty.getFieldName(), Path.of(persistentProperty),
|
||||
root.getCollection(), guard));
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(
|
||||
@@ -160,13 +184,18 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(final TypeInformation<?> type, final String dotPath,
|
||||
final Path path, final String collection, final CycleGuard guard) {
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(TypeInformation<?> type, String dotPath, Path path,
|
||||
String collection, CycleGuard guard) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard);
|
||||
}
|
||||
|
||||
final List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
private List<IndexDefinitionHolder> resolveIndexForEntity(MongoPersistentEntity<?> entity, String dotPath, Path path,
|
||||
String collection, CycleGuard guard) {
|
||||
|
||||
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity));
|
||||
indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity));
|
||||
|
||||
entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
|
||||
.guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));
|
||||
@@ -179,22 +208,26 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty persistentProperty, String dotPath,
|
||||
Path path, String collection, List<IndexDefinitionHolder> indexes, CycleGuard guard) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." : "") + persistentProperty.getFieldName();
|
||||
DotPath propertyDotPath = DotPath.from(dotPath);
|
||||
|
||||
if (!persistentProperty.isUnwrapped()) {
|
||||
propertyDotPath = propertyDotPath.append(persistentProperty.getFieldName());
|
||||
}
|
||||
|
||||
Path propertyPath = path.append(persistentProperty);
|
||||
guard.protect(persistentProperty, propertyPath);
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexes.addAll(resolveIndexForClass(persistentProperty.getTypeInformation().getActualType(), propertyDotPath,
|
||||
propertyPath, collection, guard));
|
||||
indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty),
|
||||
propertyDotPath.toString(), propertyPath, collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
persistentProperty);
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(),
|
||||
collection, persistentProperty);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
@@ -206,6 +239,13 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
List<IndexDefinitionHolder> indices = new ArrayList<>(2);
|
||||
|
||||
if (persistentProperty.isUnwrapped() && (persistentProperty.isAnnotationPresent(Indexed.class)
|
||||
|| persistentProperty.isAnnotationPresent(HashIndexed.class)
|
||||
|| persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class))) {
|
||||
throw new InvalidDataAccessApiUsageException(
|
||||
String.format("Index annotation not allowed on unwrapped object for path '%s'.", dotPath));
|
||||
}
|
||||
|
||||
if (persistentProperty.isAnnotationPresent(Indexed.class)) {
|
||||
indices.add(createIndexDefinition(dotPath, collection, persistentProperty));
|
||||
} else if (persistentProperty.isAnnotationPresent(GeoSpatialIndexed.class)) {
|
||||
@@ -215,6 +255,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
if (persistentProperty.isAnnotationPresent(HashIndexed.class)) {
|
||||
indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty));
|
||||
}
|
||||
if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
indices.add(createWildcardIndexDefinition(dotPath, collection,
|
||||
persistentProperty.getRequiredAnnotation(WildcardIndexed.class),
|
||||
mappingContext.getPersistentEntity(persistentProperty)));
|
||||
}
|
||||
|
||||
return indices;
|
||||
}
|
||||
@@ -229,6 +274,18 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!entity.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return Collections.singletonList(new IndexDefinitionHolder(dotPath,
|
||||
createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity),
|
||||
collection));
|
||||
}
|
||||
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
|
||||
MongoPersistentEntity<?> root, String collection) {
|
||||
|
||||
@@ -254,7 +311,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
}
|
||||
|
||||
try {
|
||||
appendTextIndexInformation("", Path.empty(), indexDefinitionBuilder, root,
|
||||
appendTextIndexInformation(DotPath.empty(), Path.empty(), indexDefinitionBuilder, root,
|
||||
new TextIndexIncludeOptions(IncludeStrategy.DEFAULT), new CycleGuard());
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
@@ -275,9 +332,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(final String dotPath, final Path path,
|
||||
final TextIndexDefinitionBuilder indexDefinitionBuilder, final MongoPersistentEntity<?> entity,
|
||||
final TextIndexIncludeOptions includeOptions, final CycleGuard guard) {
|
||||
private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder,
|
||||
MongoPersistentEntity<?> entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@@ -286,7 +342,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
guard.protect(persistentProperty, path);
|
||||
|
||||
if (persistentProperty.isExplicitLanguageProperty() && !StringUtils.hasText(dotPath)) {
|
||||
if (persistentProperty.isExplicitLanguageProperty() && dotPath.isEmpty()) {
|
||||
indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName());
|
||||
}
|
||||
|
||||
@@ -294,8 +350,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) {
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(dotPath) ? dotPath + "." : "")
|
||||
+ persistentProperty.getFieldName();
|
||||
DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName());
|
||||
|
||||
Path propertyPath = path.append(persistentProperty);
|
||||
|
||||
@@ -308,7 +363,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
TextIndexIncludeOptions optionsForNestedType = includeOptions;
|
||||
if (!IncludeStrategy.FORCE.equals(includeOptions.getStrategy()) && indexed != null) {
|
||||
optionsForNestedType = new TextIndexIncludeOptions(IncludeStrategy.FORCE,
|
||||
new TextIndexedFieldSpec(propertyDotPath, weight));
|
||||
new TextIndexedFieldSpec(propertyDotPath.toString(), weight));
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -321,7 +376,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
entity.getName()), e);
|
||||
}
|
||||
} else if (includeOptions.isForce() || indexed != null) {
|
||||
indexDefinitionBuilder.onField(propertyDotPath, weight);
|
||||
indexDefinitionBuilder.onField(propertyDotPath.toString(), weight);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -389,6 +444,32 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection,
|
||||
WildcardIndexed index, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
WildcardIndex indexDefinition = new WildcardIndex(dotPath);
|
||||
|
||||
if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) {
|
||||
indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity));
|
||||
}
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null));
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(index.partialFilter())) {
|
||||
indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity));
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(index.collation())) {
|
||||
indexDefinition.collation(evaluateCollation(index.collation(), entity));
|
||||
} else if (entity != null && entity.hasCollation()) {
|
||||
indexDefinition.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString,
|
||||
PersistentEntity<?, ?> entity) {
|
||||
|
||||
@@ -482,7 +563,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private PartialIndexFilter evaluatePartialFilter(String filterExpression, PersistentEntity<?,?> entity) {
|
||||
private PartialIndexFilter evaluatePartialFilter(String filterExpression, PersistentEntity<?, ?> entity) {
|
||||
|
||||
Object result = evaluate(filterExpression, getEvaluationContextForProperty(entity));
|
||||
|
||||
@@ -493,6 +574,32 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null));
|
||||
}
|
||||
|
||||
private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity<?, ?> entity) {
|
||||
|
||||
Object result = evaluate(projectionExpression, getEvaluationContextForProperty(entity));
|
||||
|
||||
if (result instanceof org.bson.Document) {
|
||||
return (org.bson.Document) result;
|
||||
}
|
||||
|
||||
return BsonUtils.parse(projectionExpression, null);
|
||||
}
|
||||
|
||||
private Collation evaluateCollation(String collationExpression, PersistentEntity<?, ?> entity) {
|
||||
|
||||
Object result = evaluate(collationExpression, getEvaluationContextForProperty(entity));
|
||||
if (result instanceof org.bson.Document) {
|
||||
return Collation.from((org.bson.Document) result);
|
||||
}
|
||||
if (result instanceof Collation) {
|
||||
return (Collation) result;
|
||||
}
|
||||
if (result instanceof String) {
|
||||
return Collation.parse(result.toString());
|
||||
}
|
||||
throw new IllegalStateException("Cannot parse collation " + result);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given
|
||||
@@ -633,7 +740,7 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
|
||||
String propertyDotPath = (StringUtils.hasText(path) ? path + "." : "") + property.getFieldName();
|
||||
DotPath propertyDotPath = DotPath.from(path).append(property.getFieldName());
|
||||
|
||||
if (property.isAnnotationPresent(GeoSpatialIndexed.class) || property.isAnnotationPresent(TextIndexed.class)) {
|
||||
throw new MappingException(
|
||||
@@ -641,8 +748,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
propertyDotPath));
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath, collection,
|
||||
property);
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(),
|
||||
collection, property);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
@@ -982,6 +1089,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
public org.bson.Document getIndexOptions() {
|
||||
return indexDefinition.getIndexOptions();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,198 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the
|
||||
* {@code $**" : 1} pattern on a root object (the one typically carrying the
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use
|
||||
* {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific
|
||||
* paths for in-/exclusion.
|
||||
* <p />
|
||||
* It can also be used to define an index on a specific field path and its subfields, e.g.
|
||||
* {@code "path.to.field.$**" : 1}. <br />
|
||||
* Note that {@literal wildcardProjections} are not allowed in this case.
|
||||
* <p />
|
||||
* <strong>LIMITATIONS</strong><br />
|
||||
* <ul>
|
||||
* <li>{@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.</li>
|
||||
* <li>Keys used for sharding must not be included</li>
|
||||
* <li>Cannot be used to generate any type of geo index.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see <a href= "https://docs.mongodb.com/manual/core/index-wildcard/">MongoDB Reference Documentation: Wildcard
|
||||
* Indexes/</a>
|
||||
* @since 3.3
|
||||
*/
|
||||
public class WildcardIndex extends Index {
|
||||
|
||||
private @Nullable String fieldName;
|
||||
private final Map<String, Object> wildcardProjection = new LinkedHashMap<>();
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link WildcardIndex} using {@code $**}.
|
||||
*/
|
||||
public WildcardIndex() {}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the
|
||||
* index will be considered a root one using {@code $**}. <br />
|
||||
* <strong>NOTE</strong> {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)}
|
||||
* can only be used for top level index definitions having an {@literal empty} or {@literal null} path.
|
||||
*
|
||||
* @param path can be {@literal null}. If {@literal null} all fields will be indexed.
|
||||
*/
|
||||
public WildcardIndex(@Nullable String path) {
|
||||
this.fieldName = path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Include the {@code _id} field in {@literal wildcardProjection}.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex includeId() {
|
||||
|
||||
wildcardProjection.put("_id", 1);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the index name to use.
|
||||
*
|
||||
* @param name
|
||||
* @return this.
|
||||
*/
|
||||
@Override
|
||||
public WildcardIndex named(String name) {
|
||||
|
||||
super.named(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unique option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index unique() {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'unique'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(long seconds) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(long value, TimeUnit timeUnit) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(Duration duration) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add fields to be included from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param paths must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjectionInclude(String... paths) {
|
||||
|
||||
for (String path : paths) {
|
||||
wildcardProjection.put(path, 1);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add fields to be excluded from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param paths must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjectionExclude(String... paths) {
|
||||
|
||||
for (String path : paths) {
|
||||
wildcardProjection.put(path, 0);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the fields to be in-/excluded from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param includeExclude must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjection(Map<String, Object> includeExclude) {
|
||||
|
||||
wildcardProjection.putAll(includeExclude);
|
||||
return this;
|
||||
}
|
||||
|
||||
private String getTargetFieldName() {
|
||||
return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getIndexKeys() {
|
||||
return new Document(getTargetFieldName(), 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getIndexOptions() {
|
||||
|
||||
Document options = new Document(super.getIndexOptions());
|
||||
if (!CollectionUtils.isEmpty(wildcardProjection)) {
|
||||
options.put("wildcardProjection", new Document(wildcardProjection));
|
||||
}
|
||||
return options;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation for an entity or property that should be used as key for a
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-wildcard/">Wildcard Index</a>. <br />
|
||||
* If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Document} annotation) will advise the index creator to create a
|
||||
* wildcard index for it.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* @Document
|
||||
* @WildcardIndexed
|
||||
* public class Product {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* db.product.createIndex({ "$**" : 1 } , {})
|
||||
* </pre>
|
||||
*
|
||||
* {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* @Document
|
||||
* @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
|
||||
* public class User {
|
||||
* private @Id String id;
|
||||
* private UserMetadata userMetadata;
|
||||
* }
|
||||
*
|
||||
*
|
||||
* db.user.createIndex(
|
||||
* { "$**" : 1 },
|
||||
* { "wildcardProjection" :
|
||||
* { "userMetadata.age" : 0 }
|
||||
* }
|
||||
* )
|
||||
* </pre>
|
||||
*
|
||||
* Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that
|
||||
* {@literal wildcardProjection} is not allowed on nested paths.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* public class User {
|
||||
*
|
||||
* private @Id String id;
|
||||
*
|
||||
* @WildcardIndexed
|
||||
* private UserMetadata userMetadata;
|
||||
* }
|
||||
*
|
||||
*
|
||||
* db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@Documented
|
||||
@Target({ ElementType.TYPE, ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface WildcardIndexed {
|
||||
|
||||
/**
|
||||
* Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
|
||||
* expression}. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity.
|
||||
*
|
||||
* @return empty by default.
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return {@literal false} by default.
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. <br />
|
||||
*
|
||||
* @return empty by default.
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/index-partial/">https://docs.mongodb.com/manual/core/index-partial/</a>
|
||||
*/
|
||||
String partialFilter() default "";
|
||||
|
||||
/**
|
||||
* Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) prasable} String.
|
||||
* <br />
|
||||
* <strong>NOTE: </strong>Can only be applied on root level documents.
|
||||
*
|
||||
* @return empty by default.
|
||||
*/
|
||||
String wildcardProjection() default "";
|
||||
|
||||
/**
|
||||
* Defines the collation to apply.
|
||||
*
|
||||
* @return an empty {@link String} by default.
|
||||
*/
|
||||
String collation() default "";
|
||||
}
|
||||
@@ -164,7 +164,8 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
@Override
|
||||
public org.springframework.data.mongodb.core.query.Collation getCollation() {
|
||||
|
||||
Object collationValue = collationExpression != null ? collationExpression.getValue(getEvaluationContext(null), String.class)
|
||||
Object collationValue = collationExpression != null
|
||||
? collationExpression.getValue(getEvaluationContext(null), String.class)
|
||||
: this.collation;
|
||||
|
||||
if (collationValue == null) {
|
||||
|
||||
@@ -41,13 +41,14 @@ import org.springframework.util.StringUtils;
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty>
|
||||
implements MongoPersistentProperty {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class);
|
||||
|
||||
private static final String ID_FIELD_NAME = "_id";
|
||||
public static final String ID_FIELD_NAME = "_id";
|
||||
private static final String LANGUAGE_FIELD_NAME = "language";
|
||||
private static final Set<Class<?>> SUPPORTED_ID_TYPES = new HashSet<Class<?>>();
|
||||
private static final Set<String> SUPPORTED_ID_PROPERTY_NAMES = new HashSet<String>();
|
||||
@@ -214,6 +215,19 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return annotation != null ? annotation.order() : Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#skipNullValues()
|
||||
*/
|
||||
@Override
|
||||
public boolean writeNullValues() {
|
||||
|
||||
org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(
|
||||
org.springframework.data.mongodb.core.mapping.Field.class);
|
||||
|
||||
return annotation != null && annotation.write() == Field.Write.ALWAYS;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation()
|
||||
@@ -231,6 +245,15 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return isAnnotationPresent(DBRef.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDocumentReference()
|
||||
*/
|
||||
@Override
|
||||
public boolean isDocumentReference() {
|
||||
return isAnnotationPresent(DocumentReference.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef()
|
||||
@@ -240,6 +263,16 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return findAnnotation(DBRef.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDocumentReference()
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public DocumentReference getDocumentReference() {
|
||||
return findAnnotation(DocumentReference.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty()
|
||||
@@ -266,4 +299,5 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
public boolean isTextScoreProperty() {
|
||||
return isAnnotationPresent(TextScore.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
private boolean dbRefResolved;
|
||||
private @Nullable DBRef dbref;
|
||||
private @Nullable String fieldName;
|
||||
private @Nullable Boolean writeNullValues;
|
||||
private @Nullable Class<?> fieldType;
|
||||
private @Nullable Boolean usePropertyAccess;
|
||||
private @Nullable Boolean isTransient;
|
||||
@@ -90,6 +91,20 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#skipNullValues()
|
||||
*/
|
||||
@Override
|
||||
public boolean writeNullValues() {
|
||||
|
||||
if (this.writeNullValues == null) {
|
||||
this.writeNullValues = super.writeNullValues();
|
||||
}
|
||||
|
||||
return this.writeNullValues;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldType()
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface DocumentPointer<T> {
|
||||
|
||||
/**
|
||||
* The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or
|
||||
* a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key,
|
||||
* etc.
|
||||
*
|
||||
* @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}.
|
||||
*/
|
||||
T getPointer();
|
||||
}
|
||||
@@ -0,0 +1,132 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
|
||||
/**
|
||||
* A {@link DocumentReference} allows referencing entities in MongoDB using a flexible schema. While the goal is the
|
||||
* same as when using {@link DBRef}, the store representation is different. The reference can be anything, a single
|
||||
* value, an entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the
|
||||
* mapping layer will use the referenced entities {@literal id} value for storage and retrieval.
|
||||
*
|
||||
* <pre class="code">
|
||||
* public class Account {
|
||||
* private String id;
|
||||
* private Float total;
|
||||
* }
|
||||
*
|
||||
* public class Person {
|
||||
* private String id;
|
||||
* @DocumentReference
|
||||
* private List<Account> accounts;
|
||||
* }
|
||||
*
|
||||
* Account account = ...
|
||||
*
|
||||
* mongoTemplate.insert(account);
|
||||
*
|
||||
* template.update(Person.class)
|
||||
* .matching(where("id").is(...))
|
||||
* .apply(new Update().push("accounts").value(account))
|
||||
* .first();
|
||||
* </pre>
|
||||
*
|
||||
* {@link #lookup()} allows defining a query filter that is independent from the {@literal _id} field and in combination
|
||||
* with {@link org.springframework.data.convert.WritingConverter writing converters} offers a flexible way of defining
|
||||
* references between entities.
|
||||
*
|
||||
* <pre class="code">
|
||||
* public class Book {
|
||||
* private ObjectId id;
|
||||
* private String title;
|
||||
*
|
||||
* @Field("publisher_ac") @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
|
||||
* }
|
||||
*
|
||||
* public class Publisher {
|
||||
*
|
||||
* private ObjectId id;
|
||||
* private String acronym;
|
||||
* private String name;
|
||||
*
|
||||
* @DocumentReference(lazy = true) private List<Book> books;
|
||||
* }
|
||||
*
|
||||
* @WritingConverter
|
||||
* public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
|
||||
*
|
||||
* public DocumentPointer<String> convert(Publisher source) {
|
||||
* return () -> source.getAcronym();
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/database-references/#std-label-document-references">MongoDB
|
||||
* Reference Documentation</a>
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.FIELD })
|
||||
@Reference
|
||||
public @interface DocumentReference {
|
||||
|
||||
/**
|
||||
* The database the referenced entity resides in. Uses the default database provided by
|
||||
* {@link org.springframework.data.mongodb.MongoDatabaseFactory} if empty.
|
||||
*
|
||||
* @see MongoDatabaseFactory#getMongoDatabase()
|
||||
* @see MongoDatabaseFactory#getMongoDatabase(String)
|
||||
*/
|
||||
String db() default "";
|
||||
|
||||
/**
|
||||
* The collection the referenced entity resides in. Defaults to the collection of the referenced entity type.
|
||||
*
|
||||
* @see MongoPersistentEntity#getCollection()
|
||||
*/
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
* The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property the
|
||||
* individual lookups are combined via an {@code $or} operator. {@code target} points to the source value (or
|
||||
* document) stored at the reference property. Properties of {@code target} can be used to define the reference query.
|
||||
*
|
||||
* @return an {@literal _id} based lookup.
|
||||
*/
|
||||
String lookup() default "{ '_id' : ?#{#target} }";
|
||||
|
||||
/**
|
||||
* A specific sort.
|
||||
*/
|
||||
String sort() default "";
|
||||
|
||||
/**
|
||||
* Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}.
|
||||
*
|
||||
* @return {@literal false} by default.
|
||||
*/
|
||||
boolean lazy() default false;
|
||||
}
|
||||
@@ -28,6 +28,7 @@ import org.springframework.core.annotation.AliasFor;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -65,4 +66,34 @@ public @interface Field {
|
||||
* @since 2.2
|
||||
*/
|
||||
FieldType targetType() default FieldType.IMPLICIT;
|
||||
|
||||
/**
|
||||
* Write rules when to include a property value upon conversion. If set to {@link Write#NON_NULL} (default)
|
||||
* {@literal null} values are not written to the target {@code Document}. Setting the value to {@link Write#ALWAYS}
|
||||
* explicitly adds an entry for the given field holding {@literal null} as a value {@code 'fieldName' : null }.
|
||||
* <p />
|
||||
* <strong>NOTE</strong>Setting the value to {@link Write#ALWAYS} may lead to increased document size.
|
||||
*
|
||||
* @return {@link Write#NON_NULL} by default.
|
||||
* @since 3.3
|
||||
*/
|
||||
Write write() default Write.NON_NULL;
|
||||
|
||||
/**
|
||||
* Enumeration of write strategies to define when a property is included for write conversion.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
enum Write {
|
||||
|
||||
/**
|
||||
* Value that indicates that property is to be always included, independent of value of the property.
|
||||
*/
|
||||
ALWAYS,
|
||||
|
||||
/**
|
||||
* Value that indicates that only properties with non-{@literal null} values are to be included.
|
||||
*/
|
||||
NON_NULL
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.util.NullableWrapperConverters;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
@@ -35,8 +36,9 @@ import org.springframework.lang.Nullable;
|
||||
*
|
||||
* @author Jon Brisbin
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersistentEntity<?>, MongoPersistentProperty>
|
||||
public class MongoMappingContext extends AbstractMappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>
|
||||
implements ApplicationContextAware {
|
||||
|
||||
private static final FieldNamingStrategy DEFAULT_NAMING_STRATEGY = PropertyNameFieldNamingStrategy.INSTANCE;
|
||||
@@ -68,6 +70,11 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
*/
|
||||
@Override
|
||||
protected boolean shouldCreatePersistentEntityFor(TypeInformation<?> type) {
|
||||
|
||||
if (NullableWrapperConverters.supports(type.getType())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return !MongoSimpleTypes.HOLDER.isSimpleType(type.getType()) && !AbstractMap.class.isAssignableFrom(type.getType());
|
||||
}
|
||||
|
||||
@@ -76,7 +83,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
* @see org.springframework.data.mapping.AbstractMappingContext#createPersistentProperty(java.lang.reflect.Field, java.beans.PropertyDescriptor, org.springframework.data.mapping.MutablePersistentEntity, org.springframework.data.mapping.SimpleTypeHolder)
|
||||
*/
|
||||
@Override
|
||||
public MongoPersistentProperty createPersistentProperty(Property property, BasicMongoPersistentEntity<?> owner,
|
||||
public MongoPersistentProperty createPersistentProperty(Property property, MongoPersistentEntity<?> owner,
|
||||
SimpleTypeHolder simpleTypeHolder) {
|
||||
return new CachingMongoPersistentProperty(property, owner, simpleTypeHolder, fieldNamingStrategy);
|
||||
}
|
||||
@@ -87,7 +94,7 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
*/
|
||||
@Override
|
||||
protected <T> BasicMongoPersistentEntity<T> createPersistentEntity(TypeInformation<T> typeInformation) {
|
||||
return new BasicMongoPersistentEntity<T>(typeInformation);
|
||||
return new BasicMongoPersistentEntity<>(typeInformation);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -96,7 +103,6 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
*/
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
|
||||
super.setApplicationContext(applicationContext);
|
||||
}
|
||||
|
||||
@@ -126,4 +132,17 @@ public class MongoMappingContext extends AbstractMappingContext<BasicMongoPersis
|
||||
public void setAutoIndexCreation(boolean autoCreateIndexes) {
|
||||
this.autoIndexCreation = autoCreateIndexes;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public MongoPersistentEntity<?> getPersistentEntity(MongoPersistentProperty persistentProperty) {
|
||||
|
||||
MongoPersistentEntity<?> entity = super.getPersistentEntity(persistentProperty);
|
||||
|
||||
if (entity == null || !persistentProperty.isUnwrapped()) {
|
||||
return entity;
|
||||
}
|
||||
|
||||
return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.model.MutablePersistentEntity;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
@@ -24,7 +25,7 @@ import org.springframework.lang.Nullable;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface MongoPersistentEntity<T> extends PersistentEntity<T, MongoPersistentProperty> {
|
||||
public interface MongoPersistentEntity<T> extends MutablePersistentEntity<T, MongoPersistentProperty> {
|
||||
|
||||
/**
|
||||
* Returns the collection the entity shall be persisted to.
|
||||
@@ -93,4 +94,12 @@ public interface MongoPersistentEntity<T> extends PersistentEntity<T, MongoPersi
|
||||
return getShardKey().isSharded();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the entity should be unwrapped.
|
||||
* @since 3.2
|
||||
*/
|
||||
default boolean isUnwrapped() {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ import org.springframework.lang.Nullable;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
|
||||
|
||||
@@ -54,6 +55,15 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
int getFieldOrder();
|
||||
|
||||
/**
|
||||
* Returns whether the property should be written to the database if its value is {@literal null}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.3
|
||||
* @see Field.Write
|
||||
*/
|
||||
boolean writeNullValues();
|
||||
|
||||
/**
|
||||
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* {@link #getDBRef()} to return an non-{@literal null} value.
|
||||
@@ -62,6 +72,15 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
boolean isDbReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is a {@link DocumentReference}. If this returns {@literal true} you can expect
|
||||
* {@link #getDocumentReference()} to return an non-{@literal null} value.
|
||||
*
|
||||
* @return
|
||||
* @since 3.3
|
||||
*/
|
||||
boolean isDocumentReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is explicitly marked as an identifier property of the owning {@link PersistentEntity}.
|
||||
* A property is an explicit id property if it is annotated with @see {@link Id}.
|
||||
@@ -105,6 +124,16 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
@Nullable
|
||||
DBRef getDBRef();
|
||||
|
||||
/**
|
||||
* Returns the {@link DocumentReference} if the property is a reference.
|
||||
*
|
||||
* @see #isDocumentReference()
|
||||
* @return {@literal null} if not present.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
DocumentReference getDocumentReference();
|
||||
|
||||
/**
|
||||
* Returns whether property access shall be used for reading the property value. This means it will use the getter
|
||||
* instead of field access.
|
||||
@@ -123,6 +152,14 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
return field != null ? !FieldType.IMPLICIT.equals(field.targetType()) : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the property should be unwrapped.
|
||||
* @since 3.2
|
||||
*/
|
||||
default boolean isUnwrapped() {
|
||||
return isEntity() && isAnnotationPresent(Unwrapped.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name.
|
||||
*
|
||||
@@ -137,7 +174,10 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
* @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object)
|
||||
*/
|
||||
public String convert(MongoPersistentProperty source) {
|
||||
return source.getFieldName();
|
||||
if (!source.isUnwrapped()) {
|
||||
return source.getFieldName();
|
||||
}
|
||||
return "";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
|
||||
/**
 * Identifies a domain object to be persisted to a MongoDB Time Series collection.
 *
 * @author Christoph Strobl
 * @since 3.3
 * @see <a href=
 *      "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
 * @see Document
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE })
@Document
public @interface TimeSeries {

	/**
	 * The collection the document representing the entity is supposed to be stored in. If not configured, a default
	 * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically
	 * calculate the collection based on a per operation basis.
	 *
	 * @return the name of the collection to be used.
	 * @see Document#collection()
	 */
	@AliasFor(annotation = Document.class, attribute = "collection")
	String collection() default "";

	/**
	 * Name of the property which contains the date in each time series document. This attribute is mandatory. <br />
	 * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping
	 * process.
	 *
	 * @return never {@literal null}.
	 */
	String timeField();

	/**
	 * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor
	 * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}. <br />
	 * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping
	 * process.
	 *
	 * @return empty {@link String} by default.
	 */
	String metaField() default "";

	/**
	 * Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized.
	 *
	 * @return {@link Granularity#DEFAULT server default} by default.
	 */
	Granularity granularity() default Granularity.DEFAULT;

	/**
	 * Defines the collation to apply when executing a query or creating indexes.
	 *
	 * @return an empty {@link String} by default.
	 * @see Document#collation()
	 */
	@AliasFor(annotation = Document.class, attribute = "collation")
	String collation() default "";

}
|
||||
@@ -0,0 +1,64 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @author Rogério Meneguelli Gatto
|
||||
* @since 3.2
|
||||
*/
|
||||
class UnwrapEntityContext {
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
|
||||
public UnwrapEntityContext(MongoPersistentProperty property) {
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
public MongoPersistentProperty getProperty() {
|
||||
return property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
UnwrapEntityContext that = (UnwrapEntityContext) obj;
|
||||
return ObjectUtils.nullSafeEquals(property, that.property);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return ObjectUtils.nullSafeHashCode(property);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,136 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import javax.annotation.meta.When;
|
||||
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
|
||||
/**
 * The annotation to configure a value object as flattened out in the target document.
 * <p />
 * Depending on the {@link OnEmpty value} of {@link #onEmpty()} the property is set to {@literal null} or an empty
 * instance in the case all unwrapped values are {@literal null} when reading from the result set.
 *
 * @author Christoph Strobl
 * @since 3.2
 */
@Documented
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = { ElementType.ANNOTATION_TYPE, ElementType.FIELD, ElementType.METHOD })
public @interface Unwrapped {

	/**
	 * Set the load strategy for the unwrapped object if all contained fields yield {@literal null} values.
	 * <p />
	 * {@link Nullable @Unwrapped.Nullable} and {@link Empty @Unwrapped.Empty} offer shortcuts for this.
	 *
	 * @return never {@literal null}.
	 */
	OnEmpty onEmpty();

	/**
	 * @return prefix for columns in the unwrapped value object. An empty {@link String} by default.
	 */
	String prefix() default "";

	/**
	 * Load strategy to be used {@link Unwrapped#onEmpty()}.
	 *
	 * @author Christoph Strobl
	 */
	enum OnEmpty {
		USE_NULL, USE_EMPTY
	}

	/**
	 * Shortcut for a nullable unwrapped property.
	 *
	 * <pre class="code">
	 * &#64;Unwrapped.Nullable private Address address;
	 * </pre>
	 *
	 * as alternative to the more verbose
	 *
	 * <pre class="code">
	 * &#64;Unwrapped(onEmpty = USE_NULL) &#64;javax.annotation.Nonnull(when = When.MAYBE) private Address address;
	 * </pre>
	 *
	 * @author Christoph Strobl
	 * @see Unwrapped#onEmpty()
	 */
	@Unwrapped(onEmpty = OnEmpty.USE_NULL)
	@Documented
	@Retention(RetentionPolicy.RUNTIME)
	@Target({ ElementType.FIELD, ElementType.METHOD })
	@javax.annotation.Nonnull(when = When.MAYBE)
	@interface Nullable {

		/**
		 * @return prefix for columns in the unwrapped value object. An empty {@link String} by default.
		 */
		@AliasFor(annotation = Unwrapped.class, attribute = "prefix")
		String prefix() default "";

		/**
		 * Alias for {@link #prefix()} to allow the shorthand {@code @Unwrapped.Nullable("prefix_")} notation.
		 *
		 * @return value for columns in the unwrapped value object. An empty {@link String} by default.
		 */
		@AliasFor(annotation = Unwrapped.class, attribute = "prefix")
		String value() default "";
	}

	/**
	 * Shortcut for an empty unwrapped property.
	 *
	 * <pre class="code">
	 * &#64;Unwrapped.Empty private Address address;
	 * </pre>
	 *
	 * as alternative to the more verbose
	 *
	 * <pre class="code">
	 * &#64;Unwrapped(onEmpty = USE_EMPTY) &#64;javax.annotation.Nonnull(when = When.NEVER) private Address address;
	 * </pre>
	 *
	 * @author Christoph Strobl
	 * @see Unwrapped#onEmpty()
	 */
	@Unwrapped(onEmpty = OnEmpty.USE_EMPTY)
	@Documented
	@Retention(RetentionPolicy.RUNTIME)
	@Target({ ElementType.FIELD, ElementType.METHOD })
	@javax.annotation.Nonnull(when = When.NEVER)
	@interface Empty {

		/**
		 * @return prefix for columns in the unwrapped value object. An empty {@link String} by default.
		 */
		@AliasFor(annotation = Unwrapped.class, attribute = "prefix")
		String prefix() default "";

		/**
		 * Alias for {@link #prefix()} to allow the shorthand {@code @Unwrapped.Empty("prefix_")} notation.
		 *
		 * @return value for columns in the unwrapped value object. An empty {@link String} by default.
		 */
		@AliasFor(annotation = Unwrapped.class, attribute = "prefix")
		String value() default "";
	}
}
|
||||
@@ -0,0 +1,326 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Spliterator;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.springframework.data.mapping.*;
|
||||
import org.springframework.data.mapping.model.PersistentPropertyAccessorFactory;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.spel.EvaluationContextProvider;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Unwrapped variant of {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.2
|
||||
* @see Unwrapped
|
||||
*/
|
||||
class UnwrappedMongoPersistentEntity<T> implements MongoPersistentEntity<T> {
|
||||
|
||||
private final UnwrapEntityContext context;
|
||||
private final MongoPersistentEntity<T> delegate;
|
||||
|
||||
public UnwrappedMongoPersistentEntity(MongoPersistentEntity<T> delegate, UnwrapEntityContext context) {
|
||||
|
||||
this.context = context;
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCollection() {
|
||||
return delegate.getCollection();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLanguage() {
|
||||
return delegate.getLanguage();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public MongoPersistentProperty getTextScoreProperty() {
|
||||
return delegate.getTextScoreProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasTextScoreProperty() {
|
||||
return delegate.hasTextScoreProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public Collation getCollation() {
|
||||
return delegate.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasCollation() {
|
||||
return delegate.hasCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ShardKey getShardKey() {
|
||||
return delegate.getShardKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSharded() {
|
||||
return delegate.isSharded();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return delegate.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public PreferredConstructor<T, MongoPersistentProperty> getPersistenceConstructor() {
|
||||
return delegate.getPersistenceConstructor();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isConstructorArgument(PersistentProperty<?> property) {
|
||||
return delegate.isConstructorArgument(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isIdProperty(PersistentProperty<?> property) {
|
||||
return delegate.isIdProperty(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isVersionProperty(PersistentProperty<?> property) {
|
||||
return delegate.isVersionProperty(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public MongoPersistentProperty getIdProperty() {
|
||||
return delegate.getIdProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getRequiredIdProperty() {
|
||||
return delegate.getRequiredIdProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public MongoPersistentProperty getVersionProperty() {
|
||||
return delegate.getVersionProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getRequiredVersionProperty() {
|
||||
return delegate.getRequiredVersionProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public MongoPersistentProperty getPersistentProperty(String name) {
|
||||
return wrap(delegate.getPersistentProperty(name));
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getRequiredPersistentProperty(String name) {
|
||||
|
||||
MongoPersistentProperty persistentProperty = getPersistentProperty(name);
|
||||
if (persistentProperty != null) {
|
||||
return persistentProperty;
|
||||
}
|
||||
|
||||
throw new IllegalStateException(String.format("Required property %s not found for %s!", name, getType()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public MongoPersistentProperty getPersistentProperty(Class<? extends Annotation> annotationType) {
|
||||
return wrap(delegate.getPersistentProperty(annotationType));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<MongoPersistentProperty> getPersistentProperties(Class<? extends Annotation> annotationType) {
|
||||
return Streamable.of(delegate.getPersistentProperties(annotationType)).stream().map(this::wrap)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasIdProperty() {
|
||||
return delegate.hasIdProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasVersionProperty() {
|
||||
return delegate.hasVersionProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<T> getType() {
|
||||
return delegate.getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Alias getTypeAlias() {
|
||||
return delegate.getTypeAlias();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TypeInformation<T> getTypeInformation() {
|
||||
return delegate.getTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWithProperties(PropertyHandler<MongoPersistentProperty> handler) {
|
||||
|
||||
delegate.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> {
|
||||
handler.doWithPersistentProperty(wrap(property));
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWithProperties(SimplePropertyHandler handler) {
|
||||
|
||||
delegate.doWithProperties((SimplePropertyHandler) property -> {
|
||||
if (property instanceof MongoPersistentProperty) {
|
||||
handler.doWithPersistentProperty(wrap((MongoPersistentProperty) property));
|
||||
} else {
|
||||
handler.doWithPersistentProperty(property);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWithAssociations(AssociationHandler<MongoPersistentProperty> handler) {
|
||||
delegate.doWithAssociations(handler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doWithAssociations(SimpleAssociationHandler handler) {
|
||||
delegate.doWithAssociations(handler);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public <A extends Annotation> A findAnnotation(Class<A> annotationType) {
|
||||
return delegate.findAnnotation(annotationType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <A extends Annotation> A getRequiredAnnotation(Class<A> annotationType) throws IllegalStateException {
|
||||
return delegate.getRequiredAnnotation(annotationType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <A extends Annotation> boolean isAnnotationPresent(Class<A> annotationType) {
|
||||
return delegate.isAnnotationPresent(annotationType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <B> PersistentPropertyAccessor<B> getPropertyAccessor(B bean) {
|
||||
return delegate.getPropertyAccessor(bean);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <B> PersistentPropertyPathAccessor<B> getPropertyPathAccessor(B bean) {
|
||||
return delegate.getPropertyPathAccessor(bean);
|
||||
}
|
||||
|
||||
@Override
|
||||
public IdentifierAccessor getIdentifierAccessor(Object bean) {
|
||||
return delegate.getIdentifierAccessor(bean);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isNew(Object bean) {
|
||||
return delegate.isNew(bean);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isImmutable() {
|
||||
return delegate.isImmutable();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean requiresPropertyPopulation() {
|
||||
return delegate.requiresPropertyPopulation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<MongoPersistentProperty> iterator() {
|
||||
|
||||
List<MongoPersistentProperty> target = new ArrayList<>();
|
||||
delegate.iterator().forEachRemaining(it -> target.add(wrap(it)));
|
||||
return target.iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(Consumer<? super MongoPersistentProperty> action) {
|
||||
delegate.forEach(it -> action.accept(wrap(it)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Spliterator<MongoPersistentProperty> spliterator() {
|
||||
return delegate.spliterator();
|
||||
}
|
||||
|
||||
private MongoPersistentProperty wrap(MongoPersistentProperty source) {
|
||||
if (source == null) {
|
||||
return source;
|
||||
}
|
||||
return new UnwrappedMongoPersistentProperty(source, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addPersistentProperty(MongoPersistentProperty property) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addAssociation(Association<MongoPersistentProperty> association) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void verify() throws MappingException {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPersistentPropertyAccessorFactory(PersistentPropertyAccessorFactory factory) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setEvaluationContextProvider(EvaluationContextProvider provider) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isUnwrapped() {
|
||||
return context.getProperty().isUnwrapped();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,369 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
 * Unwrapped variant of {@link MongoPersistentProperty}.
 * <p />
 * Delegates all calls to the wrapped {@link MongoPersistentProperty}; the only behavioral difference is
 * {@link #getFieldName()}, which prepends the {@link Unwrapped#prefix()} of the owning (unwrapping) property.
 *
 * @author Christoph Strobl
 * @author Rogério Meneguelli Gatto
 * @since 3.2
 * @see Unwrapped
 */
class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {

	private final MongoPersistentProperty delegate;
	private final UnwrapEntityContext context;

	public UnwrappedMongoPersistentProperty(MongoPersistentProperty delegate, UnwrapEntityContext context) {

		this.delegate = delegate;
		this.context = context;
	}

	@Override
	public String getFieldName() {

		// Only apply the prefix when the owning property is actually unwrapped.
		if (!context.getProperty().isUnwrapped()) {
			return delegate.getFieldName();
		}

		// NOTE(review): findAnnotation may return null; isUnwrapped() presumably guarantees the @Unwrapped
		// (meta-)annotation is present here — confirm, otherwise this line can NPE.
		return context.getProperty().findAnnotation(Unwrapped.class).prefix() + delegate.getFieldName();
	}

	@Override
	public Class<?> getFieldType() {
		return delegate.getFieldType();
	}

	@Override
	public int getFieldOrder() {
		return delegate.getFieldOrder();
	}

	@Override
	public boolean writeNullValues() {
		return delegate.writeNullValues();
	}

	@Override
	public boolean isDbReference() {
		return delegate.isDbReference();
	}

	@Override
	public boolean isDocumentReference() {
		return delegate.isDocumentReference();
	}

	@Override
	public boolean isExplicitIdProperty() {
		return delegate.isExplicitIdProperty();
	}

	@Override
	public boolean isLanguageProperty() {
		return delegate.isLanguageProperty();
	}

	@Override
	public boolean isExplicitLanguageProperty() {
		return delegate.isExplicitLanguageProperty();
	}

	@Override
	public boolean isTextScoreProperty() {
		return delegate.isTextScoreProperty();
	}

	@Override
	@Nullable
	public DBRef getDBRef() {
		return delegate.getDBRef();
	}

	@Override
	@Nullable
	public DocumentReference getDocumentReference() {
		return delegate.getDocumentReference();
	}

	@Override
	public boolean usePropertyAccess() {
		return delegate.usePropertyAccess();
	}

	@Override
	public boolean hasExplicitWriteTarget() {
		return delegate.hasExplicitWriteTarget();
	}

	@Override
	public PersistentEntity<?, MongoPersistentProperty> getOwner() {
		return delegate.getOwner();
	}

	@Override
	public String getName() {
		return delegate.getName();
	}

	@Override
	public Class<?> getType() {
		return delegate.getType();
	}

	@Override
	public TypeInformation<?> getTypeInformation() {
		return delegate.getTypeInformation();
	}

	@Override
	public Iterable<? extends TypeInformation<?>> getPersistentEntityTypes() {
		return delegate.getPersistentEntityTypes();
	}

	@Override
	public Iterable<? extends TypeInformation<?>> getPersistentEntityTypeInformation() {
		return delegate.getPersistentEntityTypeInformation();
	}

	@Override
	@Nullable
	public Method getGetter() {
		return delegate.getGetter();
	}

	@Override
	public Method getRequiredGetter() {
		return delegate.getRequiredGetter();
	}

	@Override
	@Nullable
	public Method getSetter() {
		return delegate.getSetter();
	}

	@Override
	public Method getRequiredSetter() {
		return delegate.getRequiredSetter();
	}

	@Override
	@Nullable
	public Method getWither() {
		return delegate.getWither();
	}

	@Override
	public Method getRequiredWither() {
		return delegate.getRequiredWither();
	}

	@Override
	@Nullable
	public Field getField() {
		return delegate.getField();
	}

	@Override
	public Field getRequiredField() {
		return delegate.getRequiredField();
	}

	@Override
	@Nullable
	public String getSpelExpression() {
		return delegate.getSpelExpression();
	}

	@Override
	@Nullable
	public Association<MongoPersistentProperty> getAssociation() {
		return delegate.getAssociation();
	}

	@Override
	public Association<MongoPersistentProperty> getRequiredAssociation() {
		return delegate.getRequiredAssociation();
	}

	@Override
	public boolean isEntity() {
		return delegate.isEntity();
	}

	@Override
	public boolean isIdProperty() {
		return delegate.isIdProperty();
	}

	@Override
	public boolean isVersionProperty() {
		return delegate.isVersionProperty();
	}

	@Override
	public boolean isCollectionLike() {
		return delegate.isCollectionLike();
	}

	@Override
	public boolean isMap() {
		return delegate.isMap();
	}

	@Override
	public boolean isArray() {
		return delegate.isArray();
	}

	@Override
	public boolean isTransient() {
		return delegate.isTransient();
	}

	@Override
	public boolean isWritable() {
		return delegate.isWritable();
	}

	@Override
	public boolean isImmutable() {
		return delegate.isImmutable();
	}

	@Override
	public boolean isAssociation() {
		return delegate.isAssociation();
	}

	@Override
	public boolean isUnwrapped() {
		return delegate.isUnwrapped();
	}

	@Override
	@Nullable
	public Class<?> getComponentType() {
		return delegate.getComponentType();
	}

	@Override
	public Class<?> getRawType() {
		return delegate.getRawType();
	}

	@Override
	@Nullable
	public Class<?> getMapValueType() {
		return delegate.getMapValueType();
	}

	@Override
	public Class<?> getActualType() {
		return delegate.getActualType();
	}

	@Override
	@Nullable
	public <A extends Annotation> A findAnnotation(Class<A> annotationType) {
		return delegate.findAnnotation(annotationType);
	}

	@Override
	public <A extends Annotation> A getRequiredAnnotation(Class<A> annotationType) throws IllegalStateException {
		return delegate.getRequiredAnnotation(annotationType);
	}

	@Override
	@Nullable
	public <A extends Annotation> A findPropertyOrOwnerAnnotation(Class<A> annotationType) {
		return delegate.findPropertyOrOwnerAnnotation(annotationType);
	}

	@Override
	public boolean isAnnotationPresent(Class<? extends Annotation> annotationType) {
		return delegate.isAnnotationPresent(annotationType);
	}

	@Override
	public boolean hasActualTypeAnnotation(Class<? extends Annotation> annotationType) {
		return delegate.hasActualTypeAnnotation(annotationType);
	}

	@Override
	@Nullable
	public Class<?> getAssociationTargetType() {
		return delegate.getAssociationTargetType();
	}

	@Override
	public TypeInformation<?> getAssociationTargetTypeInformation() {
		return delegate.getAssociationTargetTypeInformation();
	}

	@Override
	public <T> PersistentPropertyAccessor<T> getAccessorForOwner(T owner) {
		return delegate.getAccessorForOwner(owner);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj) {

		if (this == obj) {
			return true;
		}

		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}

		// Equality considers both the wrapped property and the unwrap context (the prefix source).
		UnwrappedMongoPersistentProperty that = (UnwrappedMongoPersistentProperty) obj;
		if (!ObjectUtils.nullSafeEquals(delegate, that.delegate)) {
			return false;
		}
		return ObjectUtils.nullSafeEquals(context, that.context);
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {

		int result = ObjectUtils.nullSafeHashCode(delegate);
		result = 31 * result + ObjectUtils.nullSafeHashCode(context);
		return result;
	}
}
|
||||
@@ -20,13 +20,16 @@ import static org.springframework.util.ObjectUtils.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.BsonRegularExpression;
|
||||
import org.bson.BsonType;
|
||||
import org.bson.Document;
|
||||
import org.bson.types.Binary;
|
||||
import org.springframework.data.domain.Example;
|
||||
@@ -50,7 +53,7 @@ import com.mongodb.BasicDBList;
|
||||
|
||||
/**
|
||||
* Central class for creating queries. It follows a fluent API style so that you can easily chain together multiple
|
||||
* criteria. Static import of the 'Criteria.where' method will improve readability.
|
||||
* criteria. Static import of the {@link Criteria#where Criteria.where} method improves readability.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
* @author Oliver Gierke
|
||||
@@ -58,6 +61,8 @@ import com.mongodb.BasicDBList;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Andreas Zink
|
||||
* @author Ziemowit Stolarczyk
|
||||
* @author Clément Petit
|
||||
*/
|
||||
public class Criteria implements CriteriaDefinition {
|
||||
|
||||
@@ -124,7 +129,12 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to create a {@link Criteria} matching an example object.
|
||||
* Static factory method to create a {@link Criteria} matching an example object. <br />
|
||||
* By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
|
||||
* sticking with the default type key ({@code _class}), the query has restrictions such as
|
||||
* <code>_class : { $in : [com.acme.Person] } </code>. <br />
|
||||
* To avoid the above mentioned type restriction use an {@link UntypedExampleMatcher} with
|
||||
* {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
|
||||
*
|
||||
* @param example must not be {@literal null}.
|
||||
* @return new instance of {@link Criteria}.
|
||||
@@ -179,6 +189,42 @@ public class Criteria implements CriteriaDefinition {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
 * Creates a criterion using {@literal null} equality comparison which matches documents that either contain the item
 * field whose value is {@literal null} or that do not contain the item field.
 * <p />
 * Use {@link #isNullValue()} to only query for documents that contain the field whose value is equal to
 * {@link org.bson.BsonType#NULL}. <br />
 * Use {@link #exists(boolean)} to query for documents that do (not) contain the field.
 *
 * @return this.
 * @see <a href="https://docs.mongodb.com/manual/tutorial/query-for-null-fields/#equality-filter">Query for Null or
 *      Missing Fields: Equality Filter</a>
 * @since 3.3
 */
public Criteria isNull() {
	// Delegates to is(null); MongoDB's equality filter { field : null } matches both null values and missing fields.
	return is(null);
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using a {@link org.bson.BsonType} comparison which matches only documents that contain the item
|
||||
* field whose value is equal to {@link org.bson.BsonType#NULL}.
|
||||
* <p />
|
||||
* Use {@link #isNull()} to query for documents that contain the field with a {@literal null} value or do not contain the
|
||||
* field at all. <br />
|
||||
* Use {@link #exists(boolean)} to query for documents that do (not) contain the field.
|
||||
*
|
||||
* @return this.
|
||||
* @see <a href="https://docs.mongodb.com/manual/tutorial/query-for-null-fields/#type-check">Query for Null or Missing
|
||||
* Fields: Type Check</a>
|
||||
* @since 3.3
|
||||
*/
|
||||
public Criteria isNullValue() {
|
||||
|
||||
criteria.put("$type", BsonType.NULL.getValue());
|
||||
return this;
|
||||
}
|
||||
|
||||
private boolean lastOperatorWasNot() {
|
||||
return !this.criteria.isEmpty() && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]);
|
||||
}
|
||||
@@ -382,7 +428,22 @@ public class Criteria implements CriteriaDefinition {
|
||||
Assert.notNull(types, "Types must not be null!");
|
||||
Assert.noNullElements(types, "Types must not contain null.");
|
||||
|
||||
criteria.put("$type", Arrays.asList(types).stream().map(Type::value).collect(Collectors.toList()));
|
||||
return type(Arrays.asList(types));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criterion using the {@literal $type} operator.
|
||||
*
|
||||
* @param types must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/query/type/">MongoDB Query operator: $type</a>
|
||||
*/
|
||||
public Criteria type(Collection<Type> types) {
|
||||
|
||||
Assert.notNull(types, "Types must not be null!");
|
||||
|
||||
criteria.put("$type", types.stream().map(Type::value).collect(Collectors.toList()));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -615,8 +676,15 @@ public class Criteria implements CriteriaDefinition {
|
||||
*/
|
||||
public Criteria alike(Example<?> sample) {
|
||||
|
||||
criteria.put("$example", sample);
|
||||
return this;
|
||||
if (StringUtils.hasText(this.getKey())) {
|
||||
|
||||
criteria.put("$example", sample);
|
||||
return this;
|
||||
}
|
||||
|
||||
Criteria exampleCriteria = new Criteria();
|
||||
exampleCriteria.criteria.put("$example", sample);
|
||||
return registerCriteriaChainElement(exampleCriteria);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -656,46 +724,103 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an 'or' criteria using the $or operator for all of the provided criteria
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $or operator to be wrapped in a $not operator.
|
||||
* Creates a criteria using the {@code $or} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #orOperator(Criteria...)} follows a not() call directly.
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public Criteria orOperator(Criteria... criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
return orOperator(Arrays.asList(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criteria using the {@code $or} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Criteria orOperator(Collection<Criteria> criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
return registerCriteriaChainElement(new Criteria("$or").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a 'nor' criteria using the $nor operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $nor operator to be wrapped in a $not operator.
|
||||
* Creates a criteria using the {@code $nor} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #norOperator(Criteria...)} follows a not() call directly.
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public Criteria norOperator(Criteria... criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
return norOperator(Arrays.asList(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criteria using the {@code $nor} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $nor} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Criteria norOperator(Collection<Criteria> criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
return registerCriteriaChainElement(new Criteria("$nor").is(bsonList));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an 'and' criteria using the $and operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that mongodb doesn't support an $and operator to be wrapped in a $not operator.
|
||||
* Creates a criteria using the {@code $and} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if {@link #andOperator(Criteria...)} follows a not() call directly.
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public Criteria andOperator(Criteria... criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
return andOperator(Arrays.asList(criteria));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a criteria using the {@code $and} operator for all of the provided criteria.
|
||||
* <p>
|
||||
* Note that MongoDB doesn't support an {@code $and} operator to be wrapped in a {@code $not} operator.
|
||||
*
|
||||
* @throws IllegalArgumentException if this method follows a {@link #not()} call directly.
|
||||
* @param criteria must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Criteria andOperator(Collection<Criteria> criteria) {
|
||||
|
||||
Assert.notNull(criteria, "Criteria must not be null!");
|
||||
|
||||
BasicDBList bsonList = createCriteriaList(criteria);
|
||||
return registerCriteriaChainElement(new Criteria("$and").is(bsonList));
|
||||
}
|
||||
@@ -789,7 +914,7 @@ public class Criteria implements CriteriaDefinition {
|
||||
return queryCriteria;
|
||||
}
|
||||
|
||||
private BasicDBList createCriteriaList(Criteria[] criteria) {
|
||||
private BasicDBList createCriteriaList(Collection<Criteria> criteria) {
|
||||
BasicDBList bsonList = new BasicDBList();
|
||||
for (Criteria c : criteria) {
|
||||
bsonList.add(c.getCriteriaObject());
|
||||
@@ -798,11 +923,13 @@ public class Criteria implements CriteriaDefinition {
|
||||
}
|
||||
|
||||
private void setValue(Document document, String key, Object value) {
|
||||
|
||||
Object existing = document.get(key);
|
||||
|
||||
if (existing == null) {
|
||||
document.put(key, value);
|
||||
} else {
|
||||
throw new InvalidMongoDbApiUsageException("Due to limitations of the com.mongodb.BasicDocument, "
|
||||
throw new InvalidMongoDbApiUsageException("Due to limitations of the org.bson.Document, "
|
||||
+ "you can't add a second '" + key + "' expression specified as '" + key + " : " + value + "'. "
|
||||
+ "Criteria already contains '" + key + " : " + existing + "'.");
|
||||
}
|
||||
@@ -883,15 +1010,15 @@ public class Criteria implements CriteriaDefinition {
|
||||
* @param right
|
||||
* @return
|
||||
*/
|
||||
private boolean isEqual(Object left, Object right) {
|
||||
private boolean isEqual(@Nullable Object left, @Nullable Object right) {
|
||||
|
||||
if (left == null) {
|
||||
return right == null;
|
||||
}
|
||||
|
||||
if (Pattern.class.isInstance(left)) {
|
||||
if (left instanceof Pattern) {
|
||||
|
||||
if (!Pattern.class.isInstance(right)) {
|
||||
if (!(right instanceof Pattern)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -902,6 +1029,52 @@ public class Criteria implements CriteriaDefinition {
|
||||
&& leftPattern.flags() == rightPattern.flags();
|
||||
}
|
||||
|
||||
if (left instanceof Document) {
|
||||
|
||||
if (!(right instanceof Document)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Document leftDocument = (Document) left;
|
||||
Document rightDocument = (Document) right;
|
||||
Iterator<Entry<String, Object>> leftIterator = leftDocument.entrySet().iterator();
|
||||
Iterator<Entry<String, Object>> rightIterator = rightDocument.entrySet().iterator();
|
||||
|
||||
while (leftIterator.hasNext() && rightIterator.hasNext()) {
|
||||
|
||||
Map.Entry<String, Object> leftEntry = leftIterator.next();
|
||||
Map.Entry<String, Object> rightEntry = rightIterator.next();
|
||||
|
||||
if (!isEqual(leftEntry.getKey(), rightEntry.getKey())
|
||||
|| !isEqual(leftEntry.getValue(), rightEntry.getValue())) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return !leftIterator.hasNext() && !rightIterator.hasNext();
|
||||
}
|
||||
|
||||
if (Collection.class.isAssignableFrom(left.getClass())) {
|
||||
|
||||
if (!Collection.class.isAssignableFrom(right.getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Collection<?> leftCollection = (Collection<?>) left;
|
||||
Collection<?> rightCollection = (Collection<?>) right;
|
||||
Iterator<?> leftIterator = leftCollection.iterator();
|
||||
Iterator<?> rightIterator = rightCollection.iterator();
|
||||
|
||||
while (leftIterator.hasNext() && rightIterator.hasNext()) {
|
||||
|
||||
if (!isEqual(leftIterator.next(), rightIterator.next())) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return !leftIterator.hasNext() && !rightIterator.hasNext();
|
||||
}
|
||||
|
||||
return ObjectUtils.nullSafeEquals(left, right);
|
||||
}
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -37,7 +37,7 @@ import org.springframework.util.ObjectUtils;
|
||||
*/
|
||||
public class Field {
|
||||
|
||||
private final Map<String, Integer> criteria = new HashMap<>();
|
||||
private final Map<String, Object> criteria = new HashMap<>();
|
||||
private final Map<String, Object> slices = new HashMap<>();
|
||||
private final Map<String, Criteria> elemMatchs = new HashMap<>();
|
||||
private @Nullable String positionKey;
|
||||
@@ -58,6 +58,62 @@ public class Field {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the
|
||||
* result.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // { 'name' : { '$toUpper' : '$name' } }
|
||||
*
|
||||
* // native MongoDB expression
|
||||
* .project(MongoExpression.expressionFromString("'$toUpper' : '$name'")).as("name");
|
||||
*
|
||||
* // Aggregation Framework expression
|
||||
* .project(StringOperators.valueOf("name").toUpper()).as("name");
|
||||
*
|
||||
* // Aggregation Framework SpEL expression
|
||||
* .project(AggregationSpELExpression.expressionOf("toUpper(name)")).as("name");
|
||||
* </pre>
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link FieldProjectionExpression}. Define the target field name through
|
||||
* {@link FieldProjectionExpression#as(String) as(String)}.
|
||||
* @since 3.2
|
||||
*/
|
||||
public FieldProjectionExpression project(MongoExpression expression) {
|
||||
return field -> Field.this.projectAs(expression, field);
|
||||
}
|
||||
|
||||
/**
|
||||
* Project a given {@link MongoExpression} to a {@link FieldProjectionExpression#as(String) field} included in the
|
||||
* result.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* // { 'name' : { '$toUpper' : '$name' } }
|
||||
*
|
||||
* // native MongoDB expression
|
||||
* .projectAs(MongoExpression.expressionFromString("'$toUpper' : '$name'"), "name");
|
||||
*
|
||||
* // Aggregation Framework expression
|
||||
* .projectAs(StringOperators.valueOf("name").toUpper(), "name");
|
||||
*
|
||||
* // Aggregation Framework SpEL expression
|
||||
* .projectAs(AggregationSpELExpression.expressionOf("toUpper(name)"), "name");
|
||||
* </pre>
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param field the field name used in the result.
|
||||
* @return new instance of {@link FieldProjectionExpression}.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Field projectAs(MongoExpression expression, String field) {
|
||||
|
||||
criteria.put(field, expression);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Include one or more {@code fields} to be returned by the query operation.
|
||||
*
|
||||
@@ -166,8 +222,7 @@ public class Field {
|
||||
|
||||
public Document getFieldsObject() {
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
Document document = new Document((Map) criteria);
|
||||
Document document = new Document(criteria);
|
||||
|
||||
for (Entry<String, Object> entry : slices.entrySet()) {
|
||||
document.put(entry.getKey(), new Document("$slice", entry.getValue()));
|
||||
@@ -219,4 +274,21 @@ public class Field {
|
||||
result = 31 * result + positionValue;
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Intermediate builder part for projecting a {@link MongoExpression} to a result field.
|
||||
*
|
||||
* @since 3.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface FieldProjectionExpression {
|
||||
|
||||
/**
|
||||
* Set the name to be used in the result and return a {@link Field}.
|
||||
*
|
||||
* @param name must not be {@literal null}.
|
||||
* @return the calling instance {@link Field}.
|
||||
*/
|
||||
Field as(String name);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.query;
|
||||
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BsonRegularExpression;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
@@ -102,6 +103,15 @@ public enum MongoRegexCreator {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param source
|
||||
* @return
|
||||
* @since 2.2.14
|
||||
*/
|
||||
public Object toCaseInsensitiveMatch(Object source) {
|
||||
return source instanceof String ? new BsonRegularExpression(Pattern.quote((String) source), "i") : source;
|
||||
}
|
||||
|
||||
private String prepareAndEscapeStringBeforeApplyingLikeRegex(String source, MatchMode matcherType) {
|
||||
|
||||
if (MatchMode.REGEX == matcherType) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -13,31 +13,33 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import com.querydsl.core.types.Operator;
|
||||
package org.springframework.data.mongodb.core.timeseries;
|
||||
|
||||
/**
|
||||
* Spring Data specific {@link Operator operators} for usage with Querydsl and MongoDB.
|
||||
* {@link GranularityDefinition Granularities} available for Time Series data.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @since 3.3
|
||||
*/
|
||||
enum QuerydslMongoOps implements Operator {
|
||||
public enum Granularity implements GranularityDefinition {
|
||||
|
||||
/**
|
||||
* {@link Operator} always evaluating to {@literal false}.
|
||||
* Server default value to indicate no explicit value should be sent.
|
||||
*/
|
||||
NO_MATCH(Boolean.class);
|
||||
DEFAULT,
|
||||
|
||||
private final Class<?> type;
|
||||
/**
|
||||
* High frequency ingestion.
|
||||
*/
|
||||
SECONDS,
|
||||
|
||||
QuerydslMongoOps(Class<?> type) {
|
||||
this.type = type;
|
||||
}
|
||||
/**
|
||||
* Medium frequency ingestion.
|
||||
*/
|
||||
MINUTES,
|
||||
|
||||
@Override
|
||||
public Class<?> getType() {
|
||||
return type;
|
||||
}
|
||||
/**
|
||||
* Low frequency ingestion.
|
||||
*/
|
||||
HOURS
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.timeseries;
|
||||
|
||||
/**
|
||||
* The Granularity of time series data that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface GranularityDefinition {
|
||||
|
||||
String name();
|
||||
}
|
||||
@@ -78,16 +78,31 @@ public interface MongoRepository<T, ID> extends PagingAndSortingRepository<T, ID
|
||||
*/
|
||||
<S extends T> List<S> insert(Iterable<S> entities);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
/**
|
||||
* Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link List} is
|
||||
* returned. <br />
|
||||
* By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
|
||||
* sticking with the default type key ({@code _class}), the query has restrictions such as
|
||||
* <code>_class : { $in : [com.acme.Person] }</code>. <br />
|
||||
* To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
|
||||
* {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
|
||||
*
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
<S extends T> List<S> findAll(Example<S> example);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example, org.springframework.data.domain.Sort)
|
||||
/**
|
||||
* Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be
|
||||
* found an empty {@link List} is returned. <br />
|
||||
* By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
|
||||
* sticking with the default type key ({@code _class}), the query has restrictions such as
|
||||
* <code>_class : { $in : [com.acme.Person] }</code>. <br />
|
||||
* To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
|
||||
* {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
|
||||
*
|
||||
* @see org.springframework.data.repository.query.QueryByExampleExecutor#findAll(org.springframework.data.domain.Example,
|
||||
* org.springframework.data.domain.Sort)
|
||||
*/
|
||||
@Override
|
||||
<S extends T> List<S> findAll(Example<S> example, Sort sort);
|
||||
|
||||
@@ -64,4 +64,33 @@ public interface ReactiveMongoRepository<T, ID> extends ReactiveSortingRepositor
|
||||
*/
|
||||
<S extends T> Flux<S> insert(Publisher<S> entities);
|
||||
|
||||
/**
|
||||
* Returns all entities matching the given {@link Example}. In case no match could be found an empty {@link Flux} is
|
||||
* returned. <br />
|
||||
* By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
|
||||
* sticking with the default type key ({@code _class}), the query has restrictions such as
|
||||
* <code>_class : { $in : [com.acme.Person] }</code>. <br />
|
||||
* To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
|
||||
* {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
|
||||
*
|
||||
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example)
|
||||
*/
|
||||
@Override
|
||||
<S extends T> Flux<S> findAll(Example<S> example);
|
||||
|
||||
/**
|
||||
* Returns all entities matching the given {@link Example} applying the given {@link Sort}. In case no match could be
|
||||
* found an empty {@link Flux} is returned. <br />
|
||||
* By default the {@link Example} uses typed matching restricting it to probe assignable types. For example, when
|
||||
* sticking with the default type key ({@code _class}), the query has restrictions such as
|
||||
* <code>_class : { $in : [com.acme.Person] }</code>. <br />
|
||||
* To avoid the above mentioned type restriction use an {@link org.springframework.data.mongodb.core.query.UntypedExampleMatcher} with
|
||||
* {@link Example#of(Object, org.springframework.data.domain.ExampleMatcher)}.
|
||||
*
|
||||
* @see org.springframework.data.repository.query.ReactiveQueryByExampleExecutor#findAll(org.springframework.data.domain.Example,
|
||||
* org.springframework.data.domain.Sort)
|
||||
*/
|
||||
@Override
|
||||
<S extends T> Flux<S> findAll(Example<S> example, Sort sort);
|
||||
|
||||
}
|
||||
|
||||
@@ -18,6 +18,8 @@ package org.springframework.data.mongodb.repository.query;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.IntUnaryOperator;
|
||||
import java.util.function.LongUnaryOperator;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -33,6 +35,7 @@ import org.springframework.data.repository.query.QueryMethodEvaluationContextPro
|
||||
import org.springframework.expression.ExpressionParser;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -41,6 +44,7 @@ import org.springframework.util.StringUtils;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Divya Srivastava
|
||||
* @since 2.2
|
||||
*/
|
||||
abstract class AggregationUtils {
|
||||
@@ -132,6 +136,22 @@ abstract class AggregationUtils {
|
||||
*/
|
||||
static void appendLimitAndOffsetIfPresent(List<AggregationOperation> aggregationPipeline,
|
||||
ConvertingParameterAccessor accessor) {
|
||||
appendLimitAndOffsetIfPresent(aggregationPipeline, accessor, LongUnaryOperator.identity(),
|
||||
IntUnaryOperator.identity());
|
||||
}
|
||||
|
||||
/**
|
||||
* Append {@code $skip} and {@code $limit} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is
|
||||
* present.
|
||||
*
|
||||
* @param aggregationPipeline
|
||||
* @param accessor
|
||||
* @param offsetOperator
|
||||
* @param limitOperator
|
||||
* @since 3.3
|
||||
*/
|
||||
static void appendLimitAndOffsetIfPresent(List<AggregationOperation> aggregationPipeline,
|
||||
ConvertingParameterAccessor accessor, LongUnaryOperator offsetOperator, IntUnaryOperator limitOperator) {
|
||||
|
||||
Pageable pageable = accessor.getPageable();
|
||||
if (pageable.isUnpaged()) {
|
||||
@@ -139,10 +159,10 @@ abstract class AggregationUtils {
|
||||
}
|
||||
|
||||
if (pageable.getOffset() > 0) {
|
||||
aggregationPipeline.add(Aggregation.skip(pageable.getOffset()));
|
||||
aggregationPipeline.add(Aggregation.skip(offsetOperator.applyAsLong(pageable.getOffset())));
|
||||
}
|
||||
|
||||
aggregationPipeline.add(Aggregation.limit(pageable.getPageSize()));
|
||||
aggregationPipeline.add(Aggregation.limit(limitOperator.applyAsInt(pageable.getPageSize())));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -163,9 +183,9 @@ abstract class AggregationUtils {
|
||||
* @throws IllegalArgumentException when none of the above rules is met.
|
||||
*/
|
||||
@Nullable
|
||||
static <T> T extractSimpleTypeResult(Document source, Class<T> targetType, MongoConverter converter) {
|
||||
static <T> T extractSimpleTypeResult(@Nullable Document source, Class<T> targetType, MongoConverter converter) {
|
||||
|
||||
if (source.isEmpty()) {
|
||||
if (ObjectUtils.isEmpty(source)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -25,7 +25,6 @@ import java.util.regex.Pattern;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.domain.Range.Bound;
|
||||
import org.springframework.data.domain.Sort;
|
||||
@@ -51,8 +50,10 @@ import org.springframework.data.repository.query.parser.Part;
|
||||
import org.springframework.data.repository.query.parser.Part.IgnoreCaseType;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Custom query creator to create Mongo criterias.
|
||||
@@ -196,9 +197,9 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
case IS_NULL:
|
||||
return criteria.is(null);
|
||||
case NOT_IN:
|
||||
return criteria.nin(nextAsArray(parameters));
|
||||
return criteria.nin(nextAsList(parameters, part));
|
||||
case IN:
|
||||
return criteria.in(nextAsArray(parameters));
|
||||
return criteria.in(nextAsList(parameters, part));
|
||||
case LIKE:
|
||||
case STARTING_WITH:
|
||||
case ENDING_WITH:
|
||||
@@ -337,7 +338,7 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
Iterator<Object> parameters) {
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return criteria.in(nextAsArray(parameters));
|
||||
return criteria.in(nextAsList(parameters, part));
|
||||
}
|
||||
|
||||
return addAppropriateLikeRegexTo(criteria, part, parameters.next());
|
||||
@@ -400,17 +401,24 @@ class MongoQueryCreator extends AbstractQueryCreator<Query, Criteria> {
|
||||
String.format("Expected parameter type of %s but got %s!", type, parameter.getClass()));
|
||||
}
|
||||
|
||||
private Object[] nextAsArray(Iterator<Object> iterator) {
|
||||
private java.util.List<?> nextAsList(Iterator<Object> iterator, Part part) {
|
||||
|
||||
Object next = iterator.next();
|
||||
|
||||
if (next instanceof Collection) {
|
||||
return ((Collection<?>) next).toArray();
|
||||
} else if (next != null && next.getClass().isArray()) {
|
||||
return (Object[]) next;
|
||||
Streamable<?> streamable = asStreamable(iterator.next());
|
||||
if (!isSimpleComparisionPossible(part)) {
|
||||
streamable = streamable.map(MongoRegexCreator.INSTANCE::toCaseInsensitiveMatch);
|
||||
}
|
||||
|
||||
return new Object[] { next };
|
||||
return streamable.toList();
|
||||
}
|
||||
|
||||
private Streamable<?> asStreamable(Object value) {
|
||||
|
||||
if (value instanceof Collection) {
|
||||
return Streamable.of((Collection<?>) value);
|
||||
} else if (ObjectUtils.isArray(value)) {
|
||||
return Streamable.of((Object[]) value);
|
||||
}
|
||||
return Streamable.of(value);
|
||||
}
|
||||
|
||||
private String toLikeRegex(String source, Part part) {
|
||||
|
||||
@@ -17,9 +17,13 @@ package org.springframework.data.mongodb.repository.query;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.function.LongUnaryOperator;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mongodb.InvalidMongoDbApiUsageException;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
@@ -40,7 +44,12 @@ import org.springframework.expression.ExpressionParser;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link AbstractMongoQuery} implementation to run string-based aggregations using
|
||||
* {@link org.springframework.data.mongodb.repository.Aggregation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Divya Srivastava
|
||||
* @author Mark Paluch
|
||||
* @since 2.2
|
||||
*/
|
||||
public class StringBasedAggregation extends AbstractMongoQuery {
|
||||
@@ -62,6 +71,12 @@ public class StringBasedAggregation extends AbstractMongoQuery {
|
||||
ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) {
|
||||
super(method, mongoOperations, expressionParser, evaluationContextProvider);
|
||||
|
||||
if (method.isPageQuery()) {
|
||||
throw new InvalidMongoDbApiUsageException(String.format(
|
||||
"Repository aggregation method '%s' does not support '%s' return type. Please use 'Slice' or 'List' instead.",
|
||||
method.getName(), method.getReturnType().getType().getSimpleName()));
|
||||
}
|
||||
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.mongoConverter = mongoOperations.getConverter();
|
||||
this.expressionParser = expressionParser;
|
||||
@@ -76,18 +91,18 @@ public class StringBasedAggregation extends AbstractMongoQuery {
|
||||
protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProcessor,
|
||||
ConvertingParameterAccessor accessor, Class<?> typeToRead) {
|
||||
|
||||
if (method.isPageQuery() || method.isSliceQuery()) {
|
||||
throw new InvalidMongoDbApiUsageException(String.format(
|
||||
"Repository aggregation method '%s' does not support '%s' return type. Please use eg. 'List' instead.",
|
||||
method.getName(), method.getReturnType().getType().getSimpleName()));
|
||||
}
|
||||
|
||||
Class<?> sourceType = method.getDomainClass();
|
||||
Class<?> targetType = typeToRead;
|
||||
|
||||
List<AggregationOperation> pipeline = computePipeline(method, accessor);
|
||||
AggregationUtils.appendSortIfPresent(pipeline, accessor, typeToRead);
|
||||
AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor);
|
||||
|
||||
if (method.isSliceQuery()) {
|
||||
AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor, LongUnaryOperator.identity(),
|
||||
limit -> limit + 1);
|
||||
} else {
|
||||
AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor);
|
||||
}
|
||||
|
||||
boolean isSimpleReturnType = isSimpleReturnType(typeToRead);
|
||||
boolean isRawAggregationResult = ClassUtils.isAssignable(AggregationResults.class, typeToRead);
|
||||
@@ -95,28 +110,43 @@ public class StringBasedAggregation extends AbstractMongoQuery {
|
||||
if (isSimpleReturnType) {
|
||||
targetType = Document.class;
|
||||
} else if (isRawAggregationResult) {
|
||||
|
||||
// 🙈
|
||||
targetType = method.getReturnType().getRequiredActualType().getRequiredComponentType().getType();
|
||||
}
|
||||
|
||||
AggregationOptions options = computeOptions(method, accessor);
|
||||
TypedAggregation<?> aggregation = new TypedAggregation<>(sourceType, pipeline, options);
|
||||
|
||||
AggregationResults<?> result = mongoOperations.aggregate(aggregation, targetType);
|
||||
if (method.isStreamQuery()) {
|
||||
|
||||
Stream<?> stream = mongoOperations.aggregateStream(aggregation, targetType).stream();
|
||||
|
||||
if (isSimpleReturnType) {
|
||||
return stream.map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead, mongoConverter));
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
AggregationResults<Object> result = (AggregationResults<Object>) mongoOperations.aggregate(aggregation, targetType);
|
||||
|
||||
if (isRawAggregationResult) {
|
||||
return result;
|
||||
}
|
||||
|
||||
List<Object> results = result.getMappedResults();
|
||||
if (method.isCollectionQuery()) {
|
||||
return isSimpleReturnType ? convertResults(typeToRead, results) : results;
|
||||
}
|
||||
|
||||
if (isSimpleReturnType) {
|
||||
if (method.isSliceQuery()) {
|
||||
|
||||
return result.getMappedResults().stream()
|
||||
.map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead, mongoConverter))
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
return result.getMappedResults();
|
||||
Pageable pageable = accessor.getPageable();
|
||||
int pageSize = pageable.getPageSize();
|
||||
List<Object> resultsToUse = isSimpleReturnType ? convertResults(typeToRead, results) : results;
|
||||
boolean hasNext = resultsToUse.size() > pageSize;
|
||||
return new SliceImpl<>(hasNext ? resultsToUse.subList(0, pageSize) : resultsToUse, pageable, hasNext);
|
||||
}
|
||||
|
||||
Object uniqueResult = result.getUniqueMappedResult();
|
||||
@@ -126,6 +156,17 @@ public class StringBasedAggregation extends AbstractMongoQuery {
|
||||
: uniqueResult;
|
||||
}
|
||||
|
||||
private List<Object> convertResults(Class<?> typeToRead, List<Object> mappedResults) {
|
||||
|
||||
List<Object> list = new ArrayList<>(mappedResults.size());
|
||||
for (Object it : mappedResults) {
|
||||
Object extractSimpleTypeResult = AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead,
|
||||
mongoConverter);
|
||||
list.add(extractSimpleTypeResult);
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private boolean isSimpleReturnType(Class<?> targetType) {
|
||||
return MongoSimpleTypes.HOLDER.isSimpleType(targetType);
|
||||
}
|
||||
|
||||
@@ -15,12 +15,14 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.core.annotation.AnnotatedElementUtils;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.domain.Sort.Direction;
|
||||
import org.springframework.data.domain.Sort.Order;
|
||||
@@ -28,6 +30,7 @@ import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.index.Index;
|
||||
import org.springframework.data.mongodb.core.index.IndexOperationsProvider;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.repository.query.MongoEntityMetadata;
|
||||
import org.springframework.data.mongodb.repository.query.PartTreeMongoQuery;
|
||||
@@ -36,6 +39,7 @@ import org.springframework.data.repository.query.parser.Part;
|
||||
import org.springframework.data.repository.query.parser.Part.Type;
|
||||
import org.springframework.data.repository.query.parser.PartTree;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
import com.mongodb.MongoException;
|
||||
|
||||
@@ -82,9 +86,14 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
|
||||
Sort sort = tree.getSort();
|
||||
|
||||
for (Part part : tree.getParts()) {
|
||||
|
||||
if (GEOSPATIAL_TYPES.contains(part.getType())) {
|
||||
return;
|
||||
}
|
||||
if (isIndexOnUnwrappedType(part)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String property = part.getProperty().toDotPath();
|
||||
Direction order = toDirection(sort, property);
|
||||
index.on(property, order);
|
||||
@@ -107,7 +116,7 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
|
||||
|
||||
MongoEntityMetadata<?> metadata = query.getQueryMethod().getEntityInformation();
|
||||
try {
|
||||
indexOperationsProvider.indexOps(metadata.getCollectionName()).ensureIndex(index);
|
||||
indexOperationsProvider.indexOps(metadata.getCollectionName(), metadata.getJavaType()).ensureIndex(index);
|
||||
} catch (UncategorizedMongoDbException e) {
|
||||
|
||||
if (e.getCause() instanceof MongoException) {
|
||||
@@ -129,6 +138,19 @@ class IndexEnsuringQueryCreationListener implements QueryCreationListener<PartTr
|
||||
LOG.debug(String.format("Created %s!", index));
|
||||
}
|
||||
|
||||
public boolean isIndexOnUnwrappedType(Part part) {
|
||||
|
||||
// TODO we could do it for nested fields in the
|
||||
Field field = ReflectionUtils.findField(part.getProperty().getOwningType().getType(),
|
||||
part.getProperty().getSegment());
|
||||
|
||||
if (field == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return AnnotatedElementUtils.hasAnnotation(field, Unwrapped.class);
|
||||
}
|
||||
|
||||
private static Direction toDirection(Sort sort, String property) {
|
||||
|
||||
if (sort.isUnsorted()) {
|
||||
|
||||
@@ -39,7 +39,6 @@ import org.springframework.data.repository.core.RepositoryInformation;
|
||||
import org.springframework.data.repository.core.RepositoryMetadata;
|
||||
import org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments;
|
||||
import org.springframework.data.repository.core.support.RepositoryFactorySupport;
|
||||
import org.springframework.data.repository.core.support.RepositoryFragment;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy;
|
||||
import org.springframework.data.repository.query.QueryLookupStrategy.Key;
|
||||
import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider;
|
||||
@@ -92,8 +91,21 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
|
||||
*/
|
||||
@Override
|
||||
protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) {
|
||||
return getRepositoryFragments(metadata, operations);
|
||||
}
|
||||
|
||||
RepositoryFragments fragments = RepositoryFragments.empty();
|
||||
/**
|
||||
* Creates {@link RepositoryFragments} based on {@link RepositoryMetadata} to add Mongo-specific extensions. Typically
|
||||
* adds a {@link QuerydslMongoPredicateExecutor} if the repository interface uses Querydsl.
|
||||
* <p>
|
||||
* Can be overridden by subclasses to customize {@link RepositoryFragments}.
|
||||
*
|
||||
* @param metadata repository metadata.
|
||||
* @param operations the MongoDB operations manager.
|
||||
* @return
|
||||
* @since 3.2.1
|
||||
*/
|
||||
protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata, MongoOperations operations) {
|
||||
|
||||
boolean isQueryDslRepository = QUERY_DSL_PRESENT
|
||||
&& QuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface());
|
||||
@@ -105,14 +117,11 @@ public class MongoRepositoryFactory extends RepositoryFactorySupport {
|
||||
"Cannot combine Querydsl and reactive repository support in a single interface");
|
||||
}
|
||||
|
||||
MongoEntityInformation<?, Serializable> entityInformation = getEntityInformation(metadata.getDomainType(),
|
||||
metadata);
|
||||
|
||||
fragments = fragments.append(RepositoryFragment.implemented(
|
||||
getTargetRepositoryViaReflection(QuerydslMongoPredicateExecutor.class, entityInformation, operations)));
|
||||
return RepositoryFragments
|
||||
.just(new QuerydslMongoPredicateExecutor<>(getEntityInformation(metadata.getDomainType()), operations));
|
||||
}
|
||||
|
||||
return fragments;
|
||||
return RepositoryFragments.empty();
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -90,7 +90,7 @@ public class MongoRepositoryFactoryBean<T extends Repository<S, ID>, S, ID exten
|
||||
|
||||
if (createIndexesForQueryMethods) {
|
||||
factory.addQueryCreationListener(
|
||||
new IndexEnsuringQueryCreationListener(collectionName -> operations.indexOps(collectionName)));
|
||||
new IndexEnsuringQueryCreationListener((collectionName, javaType) -> operations.indexOps(javaType)));
|
||||
}
|
||||
|
||||
return factory;
|
||||
|
||||
@@ -1,459 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Queue;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.BsonJavaScript;
|
||||
import org.bson.BsonRegularExpression;
|
||||
import org.bson.Document;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
import com.querydsl.core.types.*;
|
||||
import com.querydsl.mongodb.MongodbOps;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Serializes the given Querydsl query to a Document query for MongoDB.
|
||||
* </p>
|
||||
* <p>
|
||||
* Original implementation source {@link com.querydsl.mongodb.MongodbSerializer} by {@literal The Querydsl Team}
|
||||
* (<a href="http://www.querydsl.com/team">http://www.querydsl.com/team</a>) licensed under the Apache License, Version
|
||||
* 2.0.
|
||||
* </p>
|
||||
* Modified to use {@link Document} instead of {@link com.mongodb.DBObject}, updated nullable types and code format. Use
|
||||
* Bson specific types and add {@link QuerydslMongoOps#NO_MATCH}.
|
||||
*
|
||||
* @author laimw
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @author Mikhail Kaduchka
|
||||
* @author Enrique Leon Molina
|
||||
* @since 2.1
|
||||
*/
|
||||
abstract class MongodbDocumentSerializer implements Visitor<Object, Void> {
|
||||
|
||||
@Nullable
|
||||
Object handle(Expression<?> expression) {
|
||||
return expression.accept(this, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the MongoDB specific query document.
|
||||
*
|
||||
* @param predicate must not be {@literal null}.
|
||||
* @return empty {@link Document} by default.
|
||||
*/
|
||||
Document toQuery(Predicate predicate) {
|
||||
|
||||
Object value = handle(predicate);
|
||||
|
||||
if (value == null) {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
Assert.isInstanceOf(Document.class, value,
|
||||
() -> String.format("Invalid type. Expected Document but found %s", value.getClass()));
|
||||
|
||||
return (Document) value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the MongoDB specific sort document.
|
||||
*
|
||||
* @param orderBys must not be {@literal null}.
|
||||
* @return empty {@link Document} by default.
|
||||
*/
|
||||
Document toSort(List<OrderSpecifier<?>> orderBys) {
|
||||
|
||||
Document sort = new Document();
|
||||
|
||||
orderBys.forEach(orderSpecifier -> {
|
||||
|
||||
Object key = orderSpecifier.getTarget().accept(this, null);
|
||||
|
||||
Assert.notNull(key, () -> String.format("Mapped sort key for %s must not be null!", orderSpecifier));
|
||||
sort.append(key.toString(), orderSpecifier.getOrder() == Order.ASC ? 1 : -1);
|
||||
});
|
||||
|
||||
return sort;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.Constant, java.lang.Void)
|
||||
*/
|
||||
@Override
|
||||
public Object visit(Constant<?> expr, Void context) {
|
||||
|
||||
if (!Enum.class.isAssignableFrom(expr.getType())) {
|
||||
return expr.getConstant();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked") // Guarded by previous check
|
||||
Constant<? extends Enum<?>> expectedExpr = (Constant<? extends Enum<?>>) expr;
|
||||
return expectedExpr.getConstant().name();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.TemplateExpression, java.lang.Void)
|
||||
*/
|
||||
@Override
|
||||
public Object visit(TemplateExpression<?> expr, Void context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.FactoryExpression, java.lang.Void)
|
||||
*/
|
||||
@Override
|
||||
public Object visit(FactoryExpression<?> expr, Void context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
protected String asDBKey(Operation<?> expr, int index) {
|
||||
|
||||
String key = (String) asDBValue(expr, index);
|
||||
|
||||
Assert.hasText(key, () -> String.format("Mapped key must not be null nor empty for expression %s.", expr));
|
||||
return key;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
protected Object asDBValue(Operation<?> expr, int index) {
|
||||
return expr.getArg(index).accept(this, null);
|
||||
}
|
||||
|
||||
private String regexValue(Operation<?> expr, int index) {
|
||||
|
||||
Object value = expr.getArg(index).accept(this, null);
|
||||
|
||||
Assert.notNull(value, () -> String.format("Regex for %s must not be null.", expr));
|
||||
return Pattern.quote(value.toString());
|
||||
}
|
||||
|
||||
protected Document asDocument(String key, @Nullable Object value) {
|
||||
return new Document(key, value);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Object visit(Operation<?> expr, Void context) {
|
||||
|
||||
Operator op = expr.getOperator();
|
||||
if (op == Ops.EQ) {
|
||||
|
||||
if (expr.getArg(0) instanceof Operation) {
|
||||
Operation<?> lhs = (Operation<?>) expr.getArg(0);
|
||||
if (lhs.getOperator() == Ops.COL_SIZE || lhs.getOperator() == Ops.ARRAY_SIZE) {
|
||||
return asDocument(asDBKey(lhs, 0), asDocument("$size", asDBValue(expr, 1)));
|
||||
} else {
|
||||
throw new UnsupportedOperationException("Illegal operation " + expr);
|
||||
}
|
||||
} else if (expr.getArg(0) instanceof Path) {
|
||||
Path<?> path = (Path<?>) expr.getArg(0);
|
||||
Constant<?> constant = (Constant<?>) expr.getArg(1);
|
||||
return asDocument(asDBKey(expr, 0), convert(path, constant));
|
||||
}
|
||||
} else if (op == Ops.STRING_IS_EMPTY) {
|
||||
return asDocument(asDBKey(expr, 0), "");
|
||||
} else if (op == Ops.AND) {
|
||||
|
||||
Queue<Map<Object, Object>> pendingDocuments = collectConnectorArgs("$and", expr);
|
||||
List<Map<Object, Object>> unmergeableDocuments = new ArrayList<>();
|
||||
List<Map<Object, Object>> generatedDocuments = new ArrayList<>();
|
||||
|
||||
while (!pendingDocuments.isEmpty()) {
|
||||
|
||||
Map<Object, Object> lhs = pendingDocuments.poll();
|
||||
|
||||
for (Map<Object, Object> rhs : pendingDocuments) {
|
||||
Set<Object> lhs2 = new LinkedHashSet<>(lhs.keySet());
|
||||
lhs2.retainAll(rhs.keySet());
|
||||
if (lhs2.isEmpty()) {
|
||||
lhs.putAll(rhs);
|
||||
} else {
|
||||
unmergeableDocuments.add(rhs);
|
||||
}
|
||||
}
|
||||
|
||||
generatedDocuments.add(lhs);
|
||||
pendingDocuments = new LinkedList<>(unmergeableDocuments);
|
||||
unmergeableDocuments = new LinkedList<>();
|
||||
}
|
||||
|
||||
return generatedDocuments.size() == 1 ? generatedDocuments.get(0) : asDocument("$and", generatedDocuments);
|
||||
} else if (op == Ops.NOT) {
|
||||
// Handle the not's child
|
||||
Operation<?> subOperation = (Operation<?>) expr.getArg(0);
|
||||
Operator subOp = subOperation.getOperator();
|
||||
if (subOp == Ops.IN) {
|
||||
return visit(
|
||||
ExpressionUtils.operation(Boolean.class, Ops.NOT_IN, subOperation.getArg(0), subOperation.getArg(1)),
|
||||
context);
|
||||
} else {
|
||||
Document arg = (Document) handle(expr.getArg(0));
|
||||
return negate(arg);
|
||||
}
|
||||
|
||||
} else if (op == Ops.OR) {
|
||||
return asDocument("$or", collectConnectorArgs("$or", expr));
|
||||
} else if (op == Ops.NE) {
|
||||
|
||||
Path<?> path = (Path<?>) expr.getArg(0);
|
||||
Constant<?> constant = (Constant<?>) expr.getArg(1);
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$ne", convert(path, constant)));
|
||||
|
||||
} else if (op == Ops.STARTS_WITH) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1)));
|
||||
} else if (op == Ops.STARTS_WITH_IC) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1), "i"));
|
||||
} else if (op == Ops.ENDS_WITH) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$"));
|
||||
} else if (op == Ops.ENDS_WITH_IC) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$", "i"));
|
||||
} else if (op == Ops.EQ_IGNORE_CASE) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1) + "$", "i"));
|
||||
} else if (op == Ops.STRING_CONTAINS) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*"));
|
||||
} else if (op == Ops.STRING_CONTAINS_IC) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*", "i"));
|
||||
} else if (op == Ops.MATCHES) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 1).toString()));
|
||||
} else if (op == Ops.MATCHES_IC) {
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 1).toString(), "i"));
|
||||
} else if (op == Ops.LIKE) {
|
||||
|
||||
String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString();
|
||||
return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regex));
|
||||
} else if (op == Ops.BETWEEN) {
|
||||
|
||||
Document value = new Document("$gte", asDBValue(expr, 1));
|
||||
value.append("$lte", asDBValue(expr, 2));
|
||||
return asDocument(asDBKey(expr, 0), value);
|
||||
} else if (op == Ops.IN) {
|
||||
|
||||
int constIndex = 0;
|
||||
int exprIndex = 1;
|
||||
if (expr.getArg(1) instanceof Constant<?>) {
|
||||
constIndex = 1;
|
||||
exprIndex = 0;
|
||||
}
|
||||
if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
|
||||
@SuppressWarnings("unchecked") // guarded by previous check
|
||||
Collection<?> values = ((Constant<? extends Collection<?>>) expr.getArg(constIndex)).getConstant();
|
||||
return asDocument(asDBKey(expr, exprIndex), asDocument("$in", values));
|
||||
} else {
|
||||
Path<?> path = (Path<?>) expr.getArg(exprIndex);
|
||||
Constant<?> constant = (Constant<?>) expr.getArg(constIndex);
|
||||
return asDocument(asDBKey(expr, exprIndex), convert(path, constant));
|
||||
}
|
||||
} else if (op == Ops.NOT_IN) {
|
||||
|
||||
int constIndex = 0;
|
||||
int exprIndex = 1;
|
||||
if (expr.getArg(1) instanceof Constant<?>) {
|
||||
|
||||
constIndex = 1;
|
||||
exprIndex = 0;
|
||||
}
|
||||
if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) {
|
||||
|
||||
@SuppressWarnings("unchecked") // guarded by previous check
|
||||
Collection<?> values = ((Constant<? extends Collection<?>>) expr.getArg(constIndex)).getConstant();
|
||||
return asDocument(asDBKey(expr, exprIndex), asDocument("$nin", values));
|
||||
} else {
|
||||
|
||||
Path<?> path = (Path<?>) expr.getArg(exprIndex);
|
||||
Constant<?> constant = (Constant<?>) expr.getArg(constIndex);
|
||||
return asDocument(asDBKey(expr, exprIndex), asDocument("$ne", convert(path, constant)));
|
||||
}
|
||||
} else if (op == Ops.COL_IS_EMPTY) {
|
||||
|
||||
List<Object> list = new ArrayList<>(2);
|
||||
list.add(asDocument(asDBKey(expr, 0), new ArrayList<Object>()));
|
||||
list.add(asDocument(asDBKey(expr, 0), asDocument("$exists", false)));
|
||||
return asDocument("$or", list);
|
||||
} else if (op == Ops.LT) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$lt", asDBValue(expr, 1)));
|
||||
} else if (op == Ops.GT) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$gt", asDBValue(expr, 1)));
|
||||
} else if (op == Ops.LOE) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$lte", asDBValue(expr, 1)));
|
||||
} else if (op == Ops.GOE) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$gte", asDBValue(expr, 1)));
|
||||
} else if (op == Ops.IS_NULL) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$exists", false));
|
||||
} else if (op == Ops.IS_NOT_NULL) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$exists", true));
|
||||
} else if (op == Ops.CONTAINS_KEY) {
|
||||
|
||||
Path<?> path = (Path<?>) expr.getArg(0);
|
||||
Expression<?> key = expr.getArg(1);
|
||||
return asDocument(visit(path, context) + "." + key.toString(), asDocument("$exists", true));
|
||||
} else if (op == MongodbOps.NEAR) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$near", asDBValue(expr, 1)));
|
||||
} else if (op == MongodbOps.NEAR_SPHERE) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$nearSphere", asDBValue(expr, 1)));
|
||||
} else if (op == MongodbOps.ELEM_MATCH) {
|
||||
return asDocument(asDBKey(expr, 0), asDocument("$elemMatch", asDBValue(expr, 1)));
|
||||
} else if (op == QuerydslMongoOps.NO_MATCH) {
|
||||
return new Document("$where", new BsonJavaScript("function() { return false }"));
|
||||
}
|
||||
|
||||
throw new UnsupportedOperationException("Illegal operation " + expr);
|
||||
}
|
||||
|
||||
private Object negate(Document arg) {
|
||||
|
||||
List<Object> list = new ArrayList<>();
|
||||
for (Map.Entry<String, Object> entry : arg.entrySet()) {
|
||||
|
||||
if (entry.getKey().equals("$or")) {
|
||||
list.add(asDocument("$nor", entry.getValue()));
|
||||
} else if (entry.getKey().equals("$and")) {
|
||||
|
||||
List<Object> list2 = new ArrayList<>();
|
||||
for (Object o : ((Collection) entry.getValue())) {
|
||||
list2.add(negate((Document) o));
|
||||
}
|
||||
list.add(asDocument("$or", list2));
|
||||
} else if (entry.getValue() instanceof Pattern || entry.getValue() instanceof BsonRegularExpression) {
|
||||
list.add(asDocument(entry.getKey(), asDocument("$not", entry.getValue())));
|
||||
} else if (entry.getValue() instanceof Document) {
|
||||
list.add(negate(entry.getKey(), (Document) entry.getValue()));
|
||||
} else {
|
||||
list.add(asDocument(entry.getKey(), asDocument("$ne", entry.getValue())));
|
||||
}
|
||||
}
|
||||
return list.size() == 1 ? list.get(0) : asDocument("$or", list);
|
||||
}
|
||||
|
||||
private Object negate(String key, Document value) {
|
||||
|
||||
if (value.size() == 1) {
|
||||
return asDocument(key, asDocument("$not", value));
|
||||
} else {
|
||||
|
||||
List<Object> list2 = new ArrayList<>();
|
||||
for (Map.Entry<String, Object> entry2 : value.entrySet()) {
|
||||
list2.add(asDocument(key, asDocument("$not", asDocument(entry2.getKey(), entry2.getValue()))));
|
||||
}
|
||||
|
||||
return asDocument("$or", list2);
|
||||
}
|
||||
}
|
||||
|
||||
protected Object convert(Path<?> property, Constant<?> constant) {
|
||||
|
||||
if (isReference(property)) {
|
||||
return asReference(constant.getConstant());
|
||||
} else if (isId(property)) {
|
||||
|
||||
if (isReference(property.getMetadata().getParent())) {
|
||||
return asReferenceKey(property.getMetadata().getParent().getType(), constant.getConstant());
|
||||
} else if (constant.getType().equals(String.class) && isImplicitObjectIdConversion()) {
|
||||
|
||||
String id = (String) constant.getConstant();
|
||||
return ObjectId.isValid(id) ? new ObjectId(id) : id;
|
||||
}
|
||||
}
|
||||
return visit(constant, null);
|
||||
}
|
||||
|
||||
protected boolean isImplicitObjectIdConversion() {
|
||||
return true;
|
||||
}
|
||||
|
||||
protected DBRef asReferenceKey(Class<?> entity, Object id) {
|
||||
// TODO override in subclass
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
protected abstract DBRef asReference(Object constant);
|
||||
|
||||
protected abstract boolean isReference(@Nullable Path<?> arg);
|
||||
|
||||
protected boolean isId(Path<?> arg) {
|
||||
// TODO override in subclass
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String visit(Path<?> expr, Void context) {
|
||||
|
||||
PathMetadata metadata = expr.getMetadata();
|
||||
|
||||
if (metadata.getParent() != null) {
|
||||
|
||||
Path<?> parent = metadata.getParent();
|
||||
if (parent.getMetadata().getPathType() == PathType.DELEGATE) {
|
||||
parent = parent.getMetadata().getParent();
|
||||
}
|
||||
if (metadata.getPathType() == PathType.COLLECTION_ANY) {
|
||||
return visit(parent, context);
|
||||
} else if (parent.getMetadata().getPathType() != PathType.VARIABLE) {
|
||||
|
||||
String rv = getKeyForPath(expr, metadata);
|
||||
String parentStr = visit(parent, context);
|
||||
return rv != null ? parentStr + "." + rv : parentStr;
|
||||
}
|
||||
}
|
||||
return getKeyForPath(expr, metadata);
|
||||
}
|
||||
|
||||
protected String getKeyForPath(Path<?> expr, PathMetadata metadata) {
|
||||
return metadata.getElement().toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object visit(SubQueryExpression<?> expr, Void context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object visit(ParamExpression<?> expr, Void context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private LinkedList<Map<Object, Object>> collectConnectorArgs(String operator, Operation<?> operation) {
|
||||
|
||||
LinkedList<Map<Object, Object>> pendingDocuments = new LinkedList<>();
|
||||
for (Expression<?> exp : operation.getArgs()) {
|
||||
Map<Object, Object> document = (Map<Object, Object>) handle(exp);
|
||||
if (document.keySet().size() == 1 && document.containsKey(operator)) {
|
||||
pendingDocuments.addAll((Collection<Map<Object, Object>>) document.get(operator));
|
||||
} else {
|
||||
pendingDocuments.add(document);
|
||||
}
|
||||
}
|
||||
return pendingDocuments;
|
||||
|
||||
}
|
||||
}
|
||||
@@ -34,6 +34,8 @@ import com.querydsl.core.types.FactoryExpression;
|
||||
import com.querydsl.core.types.OrderSpecifier;
|
||||
import com.querydsl.core.types.ParamExpression;
|
||||
import com.querydsl.core.types.Predicate;
|
||||
import com.querydsl.mongodb.document.AbstractMongodbQuery;
|
||||
import com.querydsl.mongodb.document.MongodbDocumentSerializer;
|
||||
|
||||
/**
|
||||
* {@code QuerydslAbstractMongodbQuery} provides a base class for general Querydsl query implementation.
|
||||
@@ -49,8 +51,12 @@ import com.querydsl.core.types.Predicate;
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @deprecated since 3.3, use Querydsl's {@link AbstractMongodbQuery} directly. This class is deprecated for removal
|
||||
* with the next major release.
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class QuerydslAbstractMongodbQuery<K, Q extends QuerydslAbstractMongodbQuery<K, Q>>
|
||||
extends AbstractMongodbQuery<Q>
|
||||
implements SimpleQuery<Q> {
|
||||
|
||||
private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL)
|
||||
@@ -67,6 +73,8 @@ public abstract class QuerydslAbstractMongodbQuery<K, Q extends QuerydslAbstract
|
||||
@SuppressWarnings("unchecked")
|
||||
QuerydslAbstractMongodbQuery(MongodbDocumentSerializer serializer) {
|
||||
|
||||
super(serializer);
|
||||
|
||||
this.queryMixin = new QueryMixin<>((Q) this, new DefaultQueryMetadata(), false);
|
||||
this.serializer = serializer;
|
||||
}
|
||||
@@ -158,22 +166,6 @@ public abstract class QuerydslAbstractMongodbQuery<K, Q extends QuerydslAbstract
|
||||
return projection;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the filer {@link Document} from the given {@link Predicate}.
|
||||
*
|
||||
* @param predicate can be {@literal null}.
|
||||
* @return an empty {@link Document} if predicate is {@literal null}.
|
||||
* @see MongodbDocumentSerializer#toQuery(Predicate)
|
||||
*/
|
||||
protected Document createQuery(@Nullable Predicate predicate) {
|
||||
|
||||
if (predicate == null) {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
return serializer.toQuery(predicate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}.
|
||||
*
|
||||
@@ -185,24 +177,6 @@ public abstract class QuerydslAbstractMongodbQuery<K, Q extends QuerydslAbstract
|
||||
return serializer.toSort(orderSpecifiers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual {@link QueryMixin} delegate.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
QueryMixin<Q> getQueryMixin() {
|
||||
return queryMixin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the where definition as a Document instance
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Document asDocument() {
|
||||
return createQuery(queryMixin.getMetadata().getWhere());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@literal Mongo Shell} representation of the query. <br />
|
||||
* The following query
|
||||
|
||||
@@ -37,7 +37,10 @@ import com.querydsl.mongodb.MongodbOps;
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
* @deprecated since 3.3, use Querydsl's {@link com.querydsl.mongodb.document.AnyEmbeddedBuilder} directly. This class
|
||||
* is deprecated for removal with the next major release.
|
||||
*/
|
||||
@Deprecated
|
||||
public class QuerydslAnyEmbeddedBuilder<Q extends QuerydslAbstractMongodbQuery<K, Q>, K> {
|
||||
|
||||
private final QueryMixin<Q> queryMixin;
|
||||
|
||||
@@ -1,272 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection;
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
|
||||
import com.mysema.commons.lang.CloseableIterator;
|
||||
import com.querydsl.core.Fetchable;
|
||||
import com.querydsl.core.JoinExpression;
|
||||
import com.querydsl.core.QueryMetadata;
|
||||
import com.querydsl.core.QueryModifiers;
|
||||
import com.querydsl.core.QueryResults;
|
||||
import com.querydsl.core.types.Expression;
|
||||
import com.querydsl.core.types.ExpressionUtils;
|
||||
import com.querydsl.core.types.Operation;
|
||||
import com.querydsl.core.types.OrderSpecifier;
|
||||
import com.querydsl.core.types.Path;
|
||||
import com.querydsl.core.types.Predicate;
|
||||
import com.querydsl.core.types.dsl.CollectionPathBase;
|
||||
|
||||
/**
|
||||
* {@link Fetchable} MongoDB query with utilizing {@link MongoOperations} for command execution.
|
||||
*
|
||||
* @param <K> result type
|
||||
* @param <Q> concrete subtype
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
abstract class QuerydslFetchableMongodbQuery<K, Q extends QuerydslFetchableMongodbQuery<K, Q>>
|
||||
extends QuerydslAbstractMongodbQuery<K, Q> implements Fetchable<K> {
|
||||
|
||||
private final Class<K> entityClass;
|
||||
private final String collection;
|
||||
private final MongoOperations mongoOperations;
|
||||
private final FindWithProjection<K> find;
|
||||
|
||||
QuerydslFetchableMongodbQuery(MongodbDocumentSerializer serializer, Class<? extends K> entityClass, String collection,
|
||||
MongoOperations mongoOperations) {
|
||||
|
||||
super(serializer);
|
||||
|
||||
this.entityClass = (Class<K>) entityClass;
|
||||
this.collection = collection;
|
||||
this.mongoOperations = mongoOperations;
|
||||
find = mongoOperations.query(this.entityClass).inCollection(collection);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#iterable()
|
||||
*/
|
||||
@Override
|
||||
public CloseableIterator<K> iterate() {
|
||||
|
||||
org.springframework.data.util.CloseableIterator<? extends K> stream = mongoOperations.stream(createQuery(),
|
||||
entityClass, collection);
|
||||
|
||||
return new CloseableIterator<K>() {
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return stream.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public K next() {
|
||||
return stream.next();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException("Cannot remove from iterator while streaming data.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
stream.close();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#fetch()
|
||||
*/
|
||||
@Override
|
||||
public List<K> fetch() {
|
||||
return find.matching(createQuery()).all();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#fetchFirst()
|
||||
*/
|
||||
@Override
|
||||
public K fetchFirst() {
|
||||
return find.matching(createQuery()).firstValue();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#fetchOne()
|
||||
*/
|
||||
@Override
|
||||
public K fetchOne() {
|
||||
return find.matching(createQuery()).oneValue();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#fetchResults()
|
||||
*/
|
||||
@Override
|
||||
public QueryResults<K> fetchResults() {
|
||||
|
||||
long total = fetchCount();
|
||||
return total > 0L ? new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total)
|
||||
: QueryResults.emptyResults();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see com.querydsl.core.Fetchable#fetchCount()
|
||||
*/
|
||||
@Override
|
||||
public long fetchCount() {
|
||||
return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count();
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a join.
|
||||
*
|
||||
* @param ref reference
|
||||
* @param target join target
|
||||
* @return new instance of {@link QuerydslJoinBuilder}.
|
||||
*/
|
||||
public <T> QuerydslJoinBuilder<Q, K, T> join(Path<T> ref, Path<T> target) {
|
||||
return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a join.
|
||||
*
|
||||
* @param ref reference
|
||||
* @param target join target
|
||||
* @return new instance of {@link QuerydslJoinBuilder}.
|
||||
*/
|
||||
public <T> QuerydslJoinBuilder<Q, K, T> join(CollectionPathBase<?, T, ?> ref, Path<T> target) {
|
||||
return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a constraint for an embedded object.
|
||||
*
|
||||
* @param collection collection must not be {@literal null}.
|
||||
* @param target target must not be {@literal null}.
|
||||
* @return new instance of {@link QuerydslAnyEmbeddedBuilder}.
|
||||
*/
|
||||
public <T> QuerydslAnyEmbeddedBuilder<Q, K> anyEmbedded(Path<? extends Collection<T>> collection, Path<T> target) {
|
||||
return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection);
|
||||
}
|
||||
|
||||
protected org.springframework.data.mongodb.core.query.Query createQuery() {
|
||||
|
||||
QueryMetadata metadata = getQueryMixin().getMetadata();
|
||||
|
||||
return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(),
|
||||
metadata.getOrderBy());
|
||||
}
|
||||
|
||||
protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter,
|
||||
@Nullable Expression<?> projection, QueryModifiers modifiers, List<OrderSpecifier<?>> orderBy) {
|
||||
|
||||
BasicQuery basicQuery = new BasicQuery(createQuery(filter), createProjection(projection));
|
||||
|
||||
Integer limit = modifiers.getLimitAsInteger();
|
||||
Integer offset = modifiers.getOffsetAsInteger();
|
||||
|
||||
if (limit != null) {
|
||||
basicQuery.limit(limit);
|
||||
}
|
||||
if (offset != null) {
|
||||
basicQuery.skip(offset);
|
||||
}
|
||||
if (orderBy.size() > 0) {
|
||||
basicQuery.setSortObject(createSort(orderBy));
|
||||
}
|
||||
|
||||
return basicQuery;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
protected Predicate createFilter(QueryMetadata metadata) {
|
||||
|
||||
Predicate filter;
|
||||
if (!metadata.getJoins().isEmpty()) {
|
||||
filter = ExpressionUtils.allOf(metadata.getWhere(), createJoinFilter(metadata));
|
||||
} else {
|
||||
filter = metadata.getWhere();
|
||||
}
|
||||
return filter;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Nullable
|
||||
protected Predicate createJoinFilter(QueryMetadata metadata) {
|
||||
|
||||
LinkedMultiValueMap<Expression<?>, Predicate> predicates = new LinkedMultiValueMap<>();
|
||||
List<JoinExpression> joins = metadata.getJoins();
|
||||
|
||||
for (int i = joins.size() - 1; i >= 0; i--) {
|
||||
|
||||
JoinExpression join = joins.get(i);
|
||||
Path<?> source = (Path) ((Operation<?>) join.getTarget()).getArg(0);
|
||||
Path<?> target = (Path) ((Operation<?>) join.getTarget()).getArg(1);
|
||||
Collection<Predicate> extraFilters = predicates.get(target.getRoot());
|
||||
Predicate filter = ExpressionUtils.allOf(join.getCondition(), allOf(extraFilters));
|
||||
|
||||
List<? extends Object> ids = getIds(target.getType(), filter);
|
||||
|
||||
if (ids.isEmpty()) {
|
||||
return ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, source);
|
||||
}
|
||||
|
||||
Path<?> path = ExpressionUtils.path(String.class, source, "$id");
|
||||
predicates.add(source.getRoot(), ExpressionUtils.in((Path<Object>) path, ids));
|
||||
}
|
||||
|
||||
Path<?> source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0);
|
||||
return allOf(predicates.get(source.getRoot()));
|
||||
}
|
||||
|
||||
private Predicate allOf(Collection<Predicate> predicates) {
|
||||
return predicates != null ? ExpressionUtils.allOf(predicates) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the list of ids matching a given condition.
|
||||
*
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return empty {@link List} if none found.
|
||||
*/
|
||||
protected List<Object> getIds(Class<?> targetType, Predicate condition) {
|
||||
|
||||
Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList());
|
||||
return mongoOperations.findDistinct(query, "_id", targetType, Object.class);
|
||||
}
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
/*
|
||||
* Copyright 2018-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.repository.support;
|
||||
|
||||
import com.querydsl.core.JoinType;
|
||||
import com.querydsl.core.support.QueryMixin;
|
||||
import com.querydsl.core.types.ExpressionUtils;
|
||||
import com.querydsl.core.types.Path;
|
||||
import com.querydsl.core.types.Predicate;
|
||||
|
||||
/**
|
||||
* {@code QuerydslJoinBuilder} is a builder for join constraints.
|
||||
* <p>
|
||||
* Original implementation source {@link com.querydsl.mongodb.JoinBuilder} by {@literal The Querydsl Team}
|
||||
* (<a href="http://www.querydsl.com/team">http://www.querydsl.com/team</a>) licensed under the Apache License, Version
|
||||
* 2.0.
|
||||
* </p>
|
||||
* Modified for usage with {@link QuerydslAbstractMongodbQuery}.
|
||||
*
|
||||
* @param <Q>
|
||||
* @param <T>
|
||||
* @author tiwe
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
public class QuerydslJoinBuilder<Q extends QuerydslAbstractMongodbQuery<K, Q>, K, T> {
|
||||
|
||||
private final QueryMixin<Q> queryMixin;
|
||||
private final Path<?> ref;
|
||||
private final Path<T> target;
|
||||
|
||||
QuerydslJoinBuilder(QueryMixin<Q> queryMixin, Path<?> ref, Path<T> target) {
|
||||
|
||||
this.queryMixin = queryMixin;
|
||||
this.ref = ref;
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the given join conditions.
|
||||
*
|
||||
* @param conditions must not be {@literal null}.
|
||||
* @return the target {@link QueryMixin}.
|
||||
* @see QueryMixin#on(Predicate)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Q on(Predicate... conditions) {
|
||||
|
||||
queryMixin.addJoin(JoinType.JOIN, ExpressionUtils.as((Path) ref, target));
|
||||
queryMixin.on(conditions);
|
||||
return queryMixin.getSelf();
|
||||
}
|
||||
}
|
||||
@@ -27,7 +27,7 @@ import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
|
||||
import org.springframework.data.querydsl.EntityPathResolver;
|
||||
import org.springframework.data.querydsl.QuerydslPredicateExecutor;
|
||||
import org.springframework.data.querydsl.SimpleEntityPathResolver;
|
||||
import org.springframework.data.repository.support.PageableExecutionUtils;
|
||||
import org.springframework.data.support.PageableExecutionUtils;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.querydsl.core.NonUniqueResultException;
|
||||
@@ -212,6 +212,10 @@ public class QuerydslMongoPredicateExecutor<T> extends QuerydslPredicateExecutor
|
||||
*/
|
||||
private SpringDataMongodbQuery<T> applyPagination(SpringDataMongodbQuery<T> query, Pageable pageable) {
|
||||
|
||||
if (pageable.isUnpaged()) {
|
||||
return query;
|
||||
}
|
||||
|
||||
query = query.offset(pageable.getOffset()).limit(pageable.getPageSize());
|
||||
return applySorting(query, pageable.getSort());
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ public class ReactiveMongoRepositoryFactoryBean<T extends Repository<S, ID>, S,
|
||||
|
||||
if (createIndexesForQueryMethods) {
|
||||
factory.addQueryCreationListener(new IndexEnsuringQueryCreationListener(
|
||||
collectionName -> IndexOperationsAdapter.blocking(operations.indexOps(collectionName))));
|
||||
(collectionName, javaType) -> IndexOperationsAdapter.blocking(operations.indexOps(javaType))));
|
||||
}
|
||||
|
||||
return factory;
|
||||
|
||||
@@ -22,6 +22,9 @@ import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.MongoOperations;
|
||||
import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection;
|
||||
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
|
||||
import org.springframework.data.mongodb.core.query.BasicQuery;
|
||||
@@ -40,19 +43,21 @@ import com.querydsl.core.types.Operation;
|
||||
import com.querydsl.core.types.OrderSpecifier;
|
||||
import com.querydsl.core.types.Path;
|
||||
import com.querydsl.core.types.Predicate;
|
||||
import com.querydsl.core.types.dsl.CollectionPathBase;
|
||||
import com.querydsl.mongodb.MongodbOps;
|
||||
import com.querydsl.mongodb.document.MongodbDocumentSerializer;
|
||||
|
||||
/**
|
||||
* MongoDB query with utilizing {@link ReactiveMongoOperations} for command execution.
|
||||
*
|
||||
* @implNote This class uses {@link MongoOperations} to directly convert documents into the target entity type. Also, we
|
||||
* want entites to participate in lifecycle events and entity callbacks.
|
||||
* @param <K> result type
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
*/
|
||||
class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K, ReactiveSpringDataMongodbQuery<K>> {
|
||||
class ReactiveSpringDataMongodbQuery<K> extends SpringDataMongodbQuerySupport<ReactiveSpringDataMongodbQuery<K>> {
|
||||
|
||||
private final Class<K> entityClass;
|
||||
private final ReactiveMongoOperations mongoOperations;
|
||||
private final FindWithProjection<K> find;
|
||||
|
||||
@@ -60,15 +65,15 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
this(new SpringDataMongodbSerializer(mongoOperations.getConverter()), mongoOperations, entityClass, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
ReactiveSpringDataMongodbQuery(MongodbDocumentSerializer serializer, ReactiveMongoOperations mongoOperations,
|
||||
Class<? extends K> entityClass, @Nullable String collection) {
|
||||
|
||||
super(serializer);
|
||||
|
||||
this.entityClass = (Class<K>) entityClass;
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.find = StringUtils.hasText(collection) ? mongoOperations.query(this.entityClass).inCollection(collection)
|
||||
: mongoOperations.query(this.entityClass);
|
||||
this.find = StringUtils.hasText(collection) ? mongoOperations.query((Class<K>) entityClass).inCollection(collection)
|
||||
: mongoOperations.query((Class<K>) entityClass);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,48 +104,11 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
return createQuery().flatMap(it -> find.matching(it).count());
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a join.
|
||||
*
|
||||
* @param ref reference
|
||||
* @param target join target
|
||||
* @return new instance of {@link QuerydslJoinBuilder}.
|
||||
*/
|
||||
<T> QuerydslJoinBuilder<ReactiveSpringDataMongodbQuery<K>, K, T> join(Path<T> ref, Path<T> target) {
|
||||
return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a join.
|
||||
*
|
||||
* @param ref reference
|
||||
* @param target join target
|
||||
* @return new instance of {@link QuerydslJoinBuilder}.
|
||||
*/
|
||||
<T> QuerydslJoinBuilder<ReactiveSpringDataMongodbQuery<K>, K, T> join(CollectionPathBase<?, T, ?> ref,
|
||||
Path<T> target) {
|
||||
|
||||
return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a constraint for an embedded object.
|
||||
*
|
||||
* @param collection collection must not be {@literal null}.
|
||||
* @param target target must not be {@literal null}.
|
||||
* @return new instance of {@link QuerydslAnyEmbeddedBuilder}.
|
||||
*/
|
||||
<T> QuerydslAnyEmbeddedBuilder<ReactiveSpringDataMongodbQuery<K>, K> anyEmbedded(
|
||||
Path<? extends Collection<T>> collection, Path<T> target) {
|
||||
|
||||
return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection);
|
||||
}
|
||||
|
||||
protected Mono<Query> createQuery() {
|
||||
|
||||
QueryMetadata metadata = getQueryMixin().getMetadata();
|
||||
|
||||
return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(),
|
||||
return createQuery(createReactiveFilter(metadata), metadata.getProjection(), metadata.getModifiers(),
|
||||
metadata.getOrderBy());
|
||||
}
|
||||
|
||||
@@ -160,7 +128,8 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
.defaultIfEmpty(createQuery(null)) //
|
||||
.map(it -> {
|
||||
|
||||
BasicQuery basicQuery = new BasicQuery(it, createProjection(projection));
|
||||
Document fields = createProjection(projection);
|
||||
BasicQuery basicQuery = new BasicQuery(it, fields == null ? new Document() : fields);
|
||||
|
||||
Integer limit = modifiers.getLimitAsInteger();
|
||||
Integer offset = modifiers.getOffsetAsInteger();
|
||||
@@ -179,11 +148,11 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
});
|
||||
}
|
||||
|
||||
protected Mono<Predicate> createFilter(QueryMetadata metadata) {
|
||||
protected Mono<Predicate> createReactiveFilter(QueryMetadata metadata) {
|
||||
|
||||
if (!metadata.getJoins().isEmpty()) {
|
||||
|
||||
return createJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it))
|
||||
return createReactiveJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it))
|
||||
.switchIfEmpty(Mono.justOrEmpty(metadata.getWhere()));
|
||||
}
|
||||
|
||||
@@ -197,7 +166,7 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Mono<Predicate> createJoinFilter(QueryMetadata metadata) {
|
||||
protected Mono<Predicate> createReactiveJoinFilter(QueryMetadata metadata) {
|
||||
|
||||
MultiValueMap<Expression<?>, Mono<Predicate>> predicates = new LinkedMultiValueMap<>();
|
||||
List<JoinExpression> joins = metadata.getJoins();
|
||||
@@ -230,7 +199,7 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
|
||||
Path<?> source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0);
|
||||
return allOf(predicates.get(source.getRoot())).onErrorResume(NoMatchException.class,
|
||||
e -> Mono.just(ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, e.source)));
|
||||
e -> Mono.just(ExpressionUtils.predicate(MongodbOps.NO_MATCH, e.source)));
|
||||
}
|
||||
|
||||
private Mono<Predicate> allOf(@Nullable Collection<Mono<Predicate>> predicates) {
|
||||
@@ -246,8 +215,8 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
*/
|
||||
protected Flux<Object> getIds(Class<?> targetType, Mono<Predicate> condition) {
|
||||
|
||||
return condition.flatMapMany(it -> getIds(targetType, it))
|
||||
.switchIfEmpty(Flux.defer(() -> getIds(targetType, (Predicate) null)));
|
||||
return condition.flatMapMany(it -> getJoinIds(targetType, it))
|
||||
.switchIfEmpty(Flux.defer(() -> getJoinIds(targetType, (Predicate) null)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -257,12 +226,18 @@ class ReactiveSpringDataMongodbQuery<K> extends QuerydslAbstractMongodbQuery<K,
|
||||
* @param condition must not be {@literal null}.
|
||||
* @return empty {@link List} if none found.
|
||||
*/
|
||||
protected Flux<Object> getIds(Class<?> targetType, @Nullable Predicate condition) {
|
||||
protected Flux<Object> getJoinIds(Class<?> targetType, @Nullable Predicate condition) {
|
||||
|
||||
return createQuery(Mono.justOrEmpty(condition), null, QueryModifiers.EMPTY, Collections.emptyList())
|
||||
.flatMapMany(query -> mongoOperations.findDistinct(query, "_id", targetType, Object.class));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<Object> getIds(Class<?> aClass, Predicate predicate) {
|
||||
throw new UnsupportedOperationException(
|
||||
"Use create Flux<Object> getIds(Class<?> targetType, Mono<Predicate> condition)");
|
||||
}
|
||||
|
||||
/**
|
||||
* Marker exception to indicate no matches for a query using reference Id's.
|
||||
*/
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.repository.support;
|
||||
import static org.springframework.data.mongodb.core.query.Criteria.*;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
@@ -35,7 +36,7 @@ import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.repository.MongoRepository;
|
||||
import org.springframework.data.mongodb.repository.query.MongoEntityInformation;
|
||||
import org.springframework.data.repository.support.PageableExecutionUtils;
|
||||
import org.springframework.data.support.PageableExecutionUtils;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -291,10 +292,10 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
|
||||
|
||||
Assert.notNull(entities, "The given Iterable of entities not be null!");
|
||||
|
||||
List<S> list = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
|
||||
Collection<S> list = toCollection(entities);
|
||||
|
||||
if (list.isEmpty()) {
|
||||
return list;
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return new ArrayList<>(mongoOperations.insertAll(list));
|
||||
@@ -408,8 +409,14 @@ public class SimpleMongoRepository<T, ID> implements MongoRepository<T, ID> {
|
||||
}
|
||||
|
||||
private Query getIdQuery(Iterable<? extends ID> ids) {
|
||||
|
||||
return new Query(new Criteria(entityInformation.getIdAttribute())
|
||||
.in(Streamable.of(ids).stream().collect(StreamUtils.toUnmodifiableList())));
|
||||
.in(toCollection(ids)));
|
||||
}
|
||||
|
||||
private static <E> Collection<E> toCollection(Iterable<E> ids) {
|
||||
return ids instanceof Collection ? (Collection<E>) ids
|
||||
: StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private List<T> findAll(@Nullable Query query) {
|
||||
|
||||
@@ -22,7 +22,6 @@ import reactor.core.publisher.Mono;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
@@ -50,6 +49,7 @@ import com.mongodb.client.result.DeleteResult;
|
||||
* @author Christoph Strobl
|
||||
* @author Ruben J Garcia
|
||||
* @author Jens Schauder
|
||||
* @author Clément Petit
|
||||
* @since 2.0
|
||||
*/
|
||||
public class SimpleReactiveMongoRepository<T, ID extends Serializable> implements ReactiveMongoRepository<T, ID> {
|
||||
@@ -113,8 +113,8 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
Assert.notNull(entityStream, "The given Publisher of entities must not be null!");
|
||||
|
||||
return Flux.from(entityStream).flatMap(entity -> entityInformation.isNew(entity) ? //
|
||||
mongoOperations.insert(entity, entityInformation.getCollectionName()).then(Mono.just(entity)) : //
|
||||
mongoOperations.save(entity, entityInformation.getCollectionName()).then(Mono.just(entity)));
|
||||
mongoOperations.insert(entity, entityInformation.getCollectionName()) : //
|
||||
mongoOperations.save(entity, entityInformation.getCollectionName()));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -363,9 +363,9 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
|
||||
Assert.notNull(entities, "The given Iterable of entities must not be null!");
|
||||
|
||||
List<S> source = Streamable.of(entities).stream().collect(StreamUtils.toUnmodifiableList());
|
||||
Collection<S> source = toCollection(entities);
|
||||
|
||||
return source.isEmpty() ? Flux.empty() : Flux.from(mongoOperations.insertAll(source));
|
||||
return source.isEmpty() ? Flux.empty() : mongoOperations.insertAll(source);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -466,7 +466,6 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
return mongoOperations.exists(query, example.getProbeType(), entityInformation.getCollectionName());
|
||||
}
|
||||
|
||||
|
||||
private Query getIdQuery(Object id) {
|
||||
return new Query(getIdCriteria(id));
|
||||
}
|
||||
@@ -476,14 +475,15 @@ public class SimpleReactiveMongoRepository<T, ID extends Serializable> implement
|
||||
}
|
||||
|
||||
private Query getIdQuery(Iterable<? extends ID> ids) {
|
||||
Collection<?> idCollection = StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
|
||||
Criteria idsInCriteria = where(entityInformation.getIdAttribute()).in(idCollection);
|
||||
return new Query(where(entityInformation.getIdAttribute()).in(toCollection(ids)));
|
||||
}
|
||||
|
||||
return new Query(idsInCriteria);
|
||||
private static <E> Collection<E> toCollection(Iterable<E> ids) {
|
||||
return ids instanceof Collection ? (Collection<E>) ids
|
||||
: StreamUtils.createStreamFromIterator(ids.iterator()).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
private Flux<T> findAll(Query query) {
|
||||
|
||||
return mongoOperations.find(query, entityInformation.getJavaType(), entityInformation.getCollectionName());
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user