Compare commits
290 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3342c75ecf | ||
|
|
2ac7fec75c | ||
|
|
593f068301 | ||
|
|
9e0343097f | ||
|
|
3f3eec19fe | ||
|
|
46b54da74b | ||
|
|
f35392f6dd | ||
|
|
5e241c6ea5 | ||
|
|
1078294e3e | ||
|
|
e3ec68b07c | ||
|
|
f93d7e7359 | ||
|
|
aacd04a42f | ||
|
|
59de671387 | ||
|
|
54101a4619 | ||
|
|
e9df28024a | ||
|
|
48c6e1eed5 | ||
|
|
9cdc79a89a | ||
|
|
f2bf878fbe | ||
|
|
4d8019abca | ||
|
|
47864e0cf9 | ||
|
|
ed83c7625e | ||
|
|
0d4b5de2a5 | ||
|
|
24e9841beb | ||
|
|
f130616e68 | ||
|
|
32da9f4336 | ||
|
|
5b83286da4 | ||
|
|
f84d9888dd | ||
|
|
8a9e120047 | ||
|
|
26cccf1f14 | ||
|
|
bfb9c2869c | ||
|
|
705f1b45c8 | ||
|
|
198fcbb1a0 | ||
|
|
a2b3e8562a | ||
|
|
f088c94548 | ||
|
|
b8aa26d150 | ||
|
|
4a24eb22b3 | ||
|
|
a70f592b26 | ||
|
|
a58a3965b7 | ||
|
|
3b0d1e0616 | ||
|
|
f58e462fc8 | ||
|
|
d133ef19dd | ||
|
|
7617099abe | ||
|
|
cb2fe05f44 | ||
|
|
7ce21431a9 | ||
|
|
451d996ae0 | ||
|
|
391d5101d8 | ||
|
|
fda07d9999 | ||
|
|
2bb8643d1b | ||
|
|
8de07fc3ac | ||
|
|
f774e35af2 | ||
|
|
0a95fd9868 | ||
|
|
49e6d53641 | ||
|
|
7a24dcfdd6 | ||
|
|
41d5bba130 | ||
|
|
7332a6dfe8 | ||
|
|
bd673fc8b3 | ||
|
|
3d697a1752 | ||
|
|
d3e4ddc37a | ||
|
|
182536981a | ||
|
|
929a2d5984 | ||
|
|
d32c01c92f | ||
|
|
c52d7a8c4f | ||
|
|
a6bd0fcea7 | ||
|
|
6e3e8210d0 | ||
|
|
c0718662d5 | ||
|
|
62bf12833e | ||
|
|
d4dc13894a | ||
|
|
830fad957e | ||
|
|
ed41116da1 | ||
|
|
bc027be665 | ||
|
|
b98a7063ee | ||
|
|
cb441d5b6c | ||
|
|
b789d0fc67 | ||
|
|
29c90d65af | ||
|
|
423e10b7bc | ||
|
|
f62feac421 | ||
|
|
bcbefa9264 | ||
|
|
a2243536b2 | ||
|
|
a36e292158 | ||
|
|
494c22b192 | ||
|
|
030f12023c | ||
|
|
31f640a398 | ||
|
|
54f098a906 | ||
|
|
885d05965b | ||
|
|
a8a0fb5dba | ||
|
|
67edae8602 | ||
|
|
249e7746d5 | ||
|
|
6a979088b5 | ||
|
|
fca629c117 | ||
|
|
dfbd1bded5 | ||
|
|
f9e98669b9 | ||
|
|
96d4abdf24 | ||
|
|
23442ef639 | ||
|
|
01b571dec9 | ||
|
|
04ec49eb9e | ||
|
|
d61cf8f57e | ||
|
|
50a12121f2 | ||
|
|
998bd1f9bb | ||
|
|
e0a57fa19b | ||
|
|
9c78802c47 | ||
|
|
a958ffb5c8 | ||
|
|
c31872d979 | ||
|
|
212509f56a | ||
|
|
b348bb6679 | ||
|
|
8be5dd3909 | ||
|
|
f2c4370584 | ||
|
|
fdff74f7b5 | ||
|
|
0070b12f95 | ||
|
|
bafc2bebf2 | ||
|
|
7146fb33e9 | ||
|
|
75999d9e36 | ||
|
|
7a64025669 | ||
|
|
113106037a | ||
|
|
132834b1e6 | ||
|
|
36a4b7f727 | ||
|
|
a8432f5bf1 | ||
|
|
e5a295bb8f | ||
|
|
f7cbd4264a | ||
|
|
0c37a20a0b | ||
|
|
230775f98e | ||
|
|
b28da2eed3 | ||
|
|
02de914993 | ||
|
|
5b498f809e | ||
|
|
f94a7ee742 | ||
|
|
ab0ffab488 | ||
|
|
85b47d66f1 | ||
|
|
0d8fe46f3b | ||
|
|
0bc78f99dd | ||
|
|
e4f450f667 | ||
|
|
49cd44295c | ||
|
|
767d97a831 | ||
|
|
bbe8410979 | ||
|
|
c0a4bdb548 | ||
|
|
673a81af0e | ||
|
|
977032620e | ||
|
|
c7263e5b11 | ||
|
|
eed9b2470a | ||
|
|
c350be1f52 | ||
|
|
65b058ffd9 | ||
|
|
4a5789d67e | ||
|
|
7b05cfad94 | ||
|
|
a16a9fe1fe | ||
|
|
2f98a6656b | ||
|
|
9e2f6055a3 | ||
|
|
7f58538292 | ||
|
|
2f208d712c | ||
|
|
63d9875576 | ||
|
|
b7ffff4769 | ||
|
|
715ae26f3c | ||
|
|
00350edd32 | ||
|
|
38e1d0d92d | ||
|
|
8f00ffd291 | ||
|
|
0af8d6839e | ||
|
|
9b02897db5 | ||
|
|
99203b397a | ||
|
|
eda1c79315 | ||
|
|
e7150f525e | ||
|
|
7d6b5ae5fe | ||
|
|
d70e459ffe | ||
|
|
a26e780957 | ||
|
|
8fb0e1326b | ||
|
|
9014f770d8 | ||
|
|
f128e6df15 | ||
|
|
270456ed81 | ||
|
|
4e960a9682 | ||
|
|
061c28f84a | ||
|
|
cba7eaba4c | ||
|
|
ada7e199a4 | ||
|
|
977e5e4c5c | ||
|
|
c8307d5a39 | ||
|
|
dcf184888e | ||
|
|
59d0042d13 | ||
|
|
8af904b81f | ||
|
|
ffceed8da9 | ||
|
|
34d66a276a | ||
|
|
e71ec874ab | ||
|
|
f24e8e5361 | ||
|
|
bf86f39b2d | ||
|
|
f662d7ca0d | ||
|
|
62eb719b1e | ||
|
|
69b582823a | ||
|
|
297ef98239 | ||
|
|
f71f107445 | ||
|
|
36e2d80d71 | ||
|
|
467536cb34 | ||
|
|
302c8031f9 | ||
|
|
7c6e951c7c | ||
|
|
92cc2a582a | ||
|
|
24171b3ae2 | ||
|
|
456c1ad26a | ||
|
|
fc41793d5d | ||
|
|
afef243634 | ||
|
|
869b88702d | ||
|
|
aca403c112 | ||
|
|
df0372eee1 | ||
|
|
c4c6267d91 | ||
|
|
73d5886aae | ||
|
|
0db47169cf | ||
|
|
ec16b873b7 | ||
|
|
2a3a4cf030 | ||
|
|
df2b2a2f68 | ||
|
|
fd0a402c99 | ||
|
|
6bd0f758fe | ||
|
|
10c0203605 | ||
|
|
82b33331fc | ||
|
|
75b5a548b6 | ||
|
|
0c481feb72 | ||
|
|
c8a791d367 | ||
|
|
510028a834 | ||
|
|
1a86761e2e | ||
|
|
30da62181f | ||
|
|
a977b8a790 | ||
|
|
f3e067f59f | ||
|
|
dbfd4e5c62 | ||
|
|
c574e5cf8a | ||
|
|
f9f4c4621b | ||
|
|
23254c10dc | ||
|
|
255491c446 | ||
|
|
1d943d62a3 | ||
|
|
7538b1a1a5 | ||
|
|
828c074167 | ||
|
|
87ab1ac48c | ||
|
|
454afd9877 | ||
|
|
45971b212c | ||
|
|
68370c16fb | ||
|
|
d2c9b47366 | ||
|
|
4d7ee0e741 | ||
|
|
e7f3a2436d | ||
|
|
4ef1ff6aff | ||
|
|
b6ad32d7d4 | ||
|
|
e875f9ea33 | ||
|
|
9db9d16cf8 | ||
|
|
f00991dc29 | ||
|
|
bacbd7133e | ||
|
|
f38f6d67ab | ||
|
|
3f27e8e152 | ||
|
|
23177fef0c | ||
|
|
f3b90c2b8a | ||
|
|
d57c5a9529 | ||
|
|
986ea39f90 | ||
|
|
5bd7ff1413 | ||
|
|
93b9f23b07 | ||
|
|
42ab7d2f63 | ||
|
|
a6a2f0bde9 | ||
|
|
7d0b070d1f | ||
|
|
81bc3c599b | ||
|
|
403f0019d5 | ||
|
|
4f65bb0810 | ||
|
|
ef29e69a87 | ||
|
|
5cffb3c07c | ||
|
|
61d3a0bd1f | ||
|
|
82d67c1dbb | ||
|
|
85a30ec915 | ||
|
|
c70c29b2c7 | ||
|
|
2a5ae0da37 | ||
|
|
826015e9c1 | ||
|
|
9dda0a2f93 | ||
|
|
7dfe460433 | ||
|
|
73a0f04933 | ||
|
|
a1c165921d | ||
|
|
3872b379cd | ||
|
|
98fe043b95 | ||
|
|
c217618d9d | ||
|
|
b1020d19ba | ||
|
|
a481636429 | ||
|
|
efa9a2d408 | ||
|
|
149a703ecc | ||
|
|
2b715c54d3 | ||
|
|
ece261aadb | ||
|
|
dae0ac3b4d | ||
|
|
5ab75eb65a | ||
|
|
e96ef8e18f | ||
|
|
82af678cab | ||
|
|
6ed274bd9b | ||
|
|
48ac7e75ba | ||
|
|
a51c96298f | ||
|
|
f1354c4508 | ||
|
|
ff7588f648 | ||
|
|
124036fe36 | ||
|
|
80c5b536df | ||
|
|
2ee33b1444 | ||
|
|
eec6cea507 | ||
|
|
90d03d92d8 | ||
|
|
9a48e32565 | ||
|
|
ede6927b65 | ||
|
|
2edc29f758 | ||
|
|
5bd9bcca75 | ||
|
|
54f75e653b | ||
|
|
7b33f56e33 | ||
|
|
829eed7d6c |
4
.mvn/wrapper/maven-wrapper.properties
vendored
4
.mvn/wrapper/maven-wrapper.properties
vendored
@@ -1,2 +1,2 @@
|
||||
#Tue Feb 22 13:55:00 CET 2022
|
||||
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.4/apache-maven-3.8.4-bin.zip
|
||||
#Fri Jun 03 09:39:35 CEST 2022
|
||||
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.5/apache-maven-3.8.5-bin.zip
|
||||
|
||||
2
CI.adoc
2
CI.adoc
@@ -1,6 +1,6 @@
|
||||
= Continuous Integration
|
||||
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
= Spring Data contribution guidelines
|
||||
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
|
||||
|
||||
61
Jenkinsfile
vendored
61
Jenkinsfile
vendored
@@ -9,7 +9,7 @@ pipeline {
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/2.5.x", threshold: hudson.model.Result.SUCCESS)
|
||||
upstream(upstreamProjects: "spring-data-commons/2.7.x", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
@@ -58,10 +58,31 @@ pipeline {
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK LTS + MongoDB 4.4') {
|
||||
stage('Publish JDK (main) + MongoDB 5.0') {
|
||||
when {
|
||||
changeset "ci/openjdk17-mongodb-4.4/**"
|
||||
changeset "ci/pipeline.properties"
|
||||
anyOf {
|
||||
changeset "ci/openjdk8-mongodb-5.0/**"
|
||||
changeset "ci/pipeline.properties"
|
||||
}
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.5.0.version']} ci/openjdk8-mongodb-5.0/")
|
||||
docker.withRegistry(p['docker.registry'], p['docker.credentials']) {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK (LTS) + MongoDB 4.4') {
|
||||
when {
|
||||
anyOf {
|
||||
changeset "ci/openjdk17-mongodb-4.4/**"
|
||||
changeset "ci/pipeline.properties"
|
||||
}
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
@@ -80,8 +101,9 @@ pipeline {
|
||||
|
||||
stage("test: baseline (main)") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch '3.2.x'
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -108,8 +130,9 @@ pipeline {
|
||||
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
allOf {
|
||||
branch '3.2.x'
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -136,6 +159,28 @@ pipeline {
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 5.0 (main)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials("${p['artifactory.credentials']}")
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.image("harbor-repo.vmware.com/dockerhub-proxy-cache/springci/spring-data-with-mongodb-5.0:${p['java.main.tag']}").inside(p['docker.java.inside.basic']) {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (LTS)") {
|
||||
agent {
|
||||
label 'data'
|
||||
@@ -162,8 +207,9 @@ pipeline {
|
||||
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch '3.2.x'
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -179,6 +225,7 @@ pipeline {
|
||||
steps {
|
||||
script {
|
||||
docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.basic']) {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -v'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
|
||||
127
README.adoc
127
README.adoc
@@ -1,17 +1,19 @@
|
||||
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
|
||||
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start]
|
||||
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
|
||||
The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
|
||||
The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
|
||||
|
||||
The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities.
|
||||
The Spring Data MongoDB project provides integration with the MongoDB document database.
|
||||
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.
|
||||
|
||||
[[code-of-conduct]]
|
||||
== Code of Conduct
|
||||
|
||||
This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
|
||||
|
||||
[[getting-started]]
|
||||
== Getting Started
|
||||
|
||||
Here is a quick teaser of an application using Spring Data Repositories in Java:
|
||||
@@ -59,6 +61,7 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
|
||||
}
|
||||
----
|
||||
|
||||
[[maven-configuration]]
|
||||
=== Maven configuration
|
||||
|
||||
Add the Maven dependency:
|
||||
@@ -68,24 +71,25 @@ Add the Maven dependency:
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>${version}.RELEASE</version>
|
||||
<version>${version}</version>
|
||||
</dependency>
|
||||
----
|
||||
|
||||
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.
|
||||
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
|
||||
and declare the appropriate dependency version.
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>${version}.BUILD-SNAPSHOT</version>
|
||||
<version>${version}-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<repository>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<id>spring-snapshot</id>
|
||||
<name>Spring Snapshot Repository</name>
|
||||
<url>https://repo.spring.io/libs-snapshot</url>
|
||||
<url>https://repo.spring.io/snapshot</url>
|
||||
</repository>
|
||||
----
|
||||
|
||||
@@ -98,7 +102,7 @@ Some of the changes affect the initial setup configuration as well as compile/ru
|
||||
|
||||
.Changed XML Namespace Elements and Attributes:
|
||||
|===
|
||||
Element / Attribute | 2.x | 3.x
|
||||
| Element / Attribute | 2.x | 3.x
|
||||
|
||||
| `<mongo:mongo-client />`
|
||||
| Used to create a `com.mongodb.MongoClient`
|
||||
@@ -116,7 +120,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead
|
||||
|
||||
.Removed XML Namespace Elements and Attributes:
|
||||
|===
|
||||
Element / Attribute | Replacement in 3.x | Comment
|
||||
| Element / Attribute | Replacement in 3.x | Comment
|
||||
|
||||
| `<mongo:db-factory mongo-ref="..." />`
|
||||
| `<mongo:db-factory mongo-client-ref="..." />`
|
||||
@@ -133,7 +137,7 @@ Element / Attribute | Replacement in 3.x | Comment
|
||||
|
||||
.New XML Namespace Elements and Attributes:
|
||||
|===
|
||||
Element | Comment
|
||||
| Element | Comment
|
||||
|
||||
| `<mongo:db-factory mongo-client-ref="..." />`
|
||||
| Replacement for `<mongo:db-factory mongo-ref="..." />`
|
||||
@@ -153,7 +157,7 @@ Element | Comment
|
||||
|
||||
.Java API changes
|
||||
|===
|
||||
Type | Comment
|
||||
| Type | Comment
|
||||
|
||||
| `MongoClientFactoryBean`
|
||||
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
|
||||
@@ -174,7 +178,7 @@ Uses `MongoClientSettings` instead of `MongoClientOptions`.
|
||||
|
||||
.Removed Java API:
|
||||
|===
|
||||
2.x | Replacement in 3.x | Comment
|
||||
| 2.x | Replacement in 3.x | Comment
|
||||
|
||||
| `MongoClientOptionsFactoryBean`
|
||||
| `MongoClientSettingsFactoryBean`
|
||||
@@ -226,6 +230,7 @@ static class Config extends AbstractMongoClientConfiguration {
|
||||
----
|
||||
====
|
||||
|
||||
[[getting-help]]
|
||||
== Getting Help
|
||||
|
||||
Having trouble with Spring Data? We’d love to help!
|
||||
@@ -239,6 +244,7 @@ If you are just starting out with Spring, try one of the https://spring.io/guide
|
||||
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
|
||||
* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].
|
||||
|
||||
[[reporting-issues]]
|
||||
== Reporting Issues
|
||||
|
||||
Spring Data uses Github as issue tracking system to record bugs and feature requests.
|
||||
@@ -249,10 +255,86 @@ If you want to raise an issue, please follow the recommendations below:
|
||||
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc.
|
||||
* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.
|
||||
|
||||
[[guides]]
|
||||
== Guides
|
||||
|
||||
The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
|
||||
|
||||
* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
|
||||
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
|
||||
|
||||
[[examples]]
|
||||
== Examples
|
||||
|
||||
* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
|
||||
|
||||
[[building-from-source]]
|
||||
== Building from Source
|
||||
|
||||
You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
|
||||
You also need JDK 1.8.
|
||||
You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
|
||||
and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.
|
||||
|
||||
NOTE: Configuration for Gradle is similar to Maven.
|
||||
|
||||
The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
|
||||
Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
|
||||
to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
|
||||
to build a reactive one.
|
||||
|
||||
However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
|
||||
and minimally, JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
|
||||
|
||||
In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
|
||||
and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
|
||||
|
||||
Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
|
||||
your MongoDB installation directory (e.g. `MONGODB_HOME`).
|
||||
|
||||
To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set]
|
||||
is required.
|
||||
|
||||
To run the MongoDB server enter the following command from a command-line:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
|
||||
...
|
||||
"msg":"Successfully connected to host"
|
||||
----
|
||||
|
||||
Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".
|
||||
|
||||
Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
|
||||
the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).
|
||||
|
||||
You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started.
|
||||
To initialize the replica set, start a mongo client:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ $MONGODB_HOME/bin/mongo
|
||||
MongoDB server version: 5.0.0
|
||||
...
|
||||
----
|
||||
|
||||
Then enter the following command:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
|
||||
----
|
||||
|
||||
Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
|
||||
In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ ulimit -n 32768
|
||||
----
|
||||
|
||||
You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately.
|
||||
|
||||
Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
@@ -261,7 +343,8 @@ You also need JDK 1.8.
|
||||
|
||||
If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].
|
||||
|
||||
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
|
||||
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
|
||||
the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
|
||||
|
||||
=== Building reference documentation
|
||||
|
||||
@@ -274,17 +357,7 @@ Building the documentation builds also the project without running tests.
|
||||
|
||||
The generated documentation is available from `target/site/reference/html/index.html`.
|
||||
|
||||
== Guides
|
||||
|
||||
The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
|
||||
|
||||
* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
|
||||
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
|
||||
|
||||
== Examples
|
||||
|
||||
* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
|
||||
|
||||
[[license]]
|
||||
== License
|
||||
|
||||
Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
|
||||
|
||||
@@ -7,6 +7,9 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
|
||||
@@ -7,6 +7,9 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
|
||||
@@ -7,6 +7,9 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
|
||||
|
||||
@@ -7,6 +7,9 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
|
||||
23
ci/openjdk8-mongodb-5.0/Dockerfile
Normal file
23
ci/openjdk8-mongodb-5.0/Dockerfile
Normal file
@@ -0,0 +1,23 @@
|
||||
ARG BASE
|
||||
FROM ${BASE}
|
||||
# Any ARG statements before FROM are cleared.
|
||||
ARG MONGODB
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
|
||||
# MongoDB 5.0 release signing key
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
|
||||
# Needed when MongoDB creates a 5.0 folder.
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update; \
|
||||
apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} ; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
@@ -1,13 +1,11 @@
|
||||
# Java versions
|
||||
java.main.tag=8u322-b06-jdk
|
||||
java.11.tag=11.0.14.1_1-jdk
|
||||
java.15.tag=15.0.2_7-jdk-hotspot
|
||||
java.next.tag=11.0.14.1_1-jdk
|
||||
java.lts.tag=17.0.2_8-jdk
|
||||
|
||||
# Docker container images - standard
|
||||
docker.java.main.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.main.tag}
|
||||
docker.java.11.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.11.tag}
|
||||
docker.java.15.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/adoptopenjdk:${java.15.tag}
|
||||
docker.java.next.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.next.tag}
|
||||
docker.java.lts.image=harbor-repo.vmware.com/dockerhub-proxy-cache/library/eclipse-temurin:${java.lts.tag}
|
||||
|
||||
# Supported versions of MongoDB
|
||||
|
||||
29
pom.xml
29
pom.xml
@@ -5,17 +5,17 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.12</version>
|
||||
<version>3.4.2</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
<description>MongoDB support for Spring Data</description>
|
||||
<url>https://projects.spring.io/spring-data-mongodb</url>
|
||||
<url>https://spring.io/projects/spring-data-mongodb</url>
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.5.12</version>
|
||||
<version>2.7.2</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -26,8 +26,8 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.5.12</springdata.commons>
|
||||
<mongo>4.2.3</mongo>
|
||||
<springdata.commons>2.7.2</springdata.commons>
|
||||
<mongo>4.6.1</mongo>
|
||||
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
@@ -112,6 +112,17 @@
|
||||
</developer>
|
||||
</developers>
|
||||
|
||||
<scm>
|
||||
<connection>scm:git:https://github.com/spring-projects/spring-data-mongodb.git</connection>
|
||||
<developerConnection>scm:git:git@github.com:spring-projects/spring-data-mongodb.git</developerConnection>
|
||||
<url>https://github.com/spring-projects/spring-data-mongodb</url>
|
||||
</scm>
|
||||
|
||||
<issueManagement>
|
||||
<system>GitHub</system>
|
||||
<url>https://github.com/spring-projects/spring-data-mongodb/issues</url>
|
||||
</issueManagement>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>benchmarks</id>
|
||||
@@ -141,11 +152,11 @@
|
||||
<id>sonatype-libs-snapshot</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.12</version>
|
||||
<version>3.4.2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.12</version>
|
||||
<version>3.4.2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.2.12</version>
|
||||
<version>3.4.2</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -87,6 +87,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
@@ -230,13 +237,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
@@ -310,6 +310,15 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- jMolecules -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jmolecules</groupId>
|
||||
<artifactId>jmolecules-ddd</artifactId>
|
||||
<version>${jmolecules}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -31,7 +31,7 @@ import com.mongodb.MongoDriverInformation;
|
||||
*/
|
||||
public class SpringDataMongoDB {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class);
|
||||
|
||||
private static final Version FALLBACK_VERSION = new Version(3);
|
||||
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
|
||||
@@ -68,7 +68,7 @@ public class SpringDataMongoDB {
|
||||
try {
|
||||
return Version.parse(versionString);
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
|
||||
LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString));
|
||||
}
|
||||
|
||||
return FALLBACK_VERSION;
|
||||
|
||||
@@ -24,7 +24,6 @@ import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.beans.BeanMetadataElement;
|
||||
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.BeanDefinitionHolder;
|
||||
import org.springframework.beans.factory.config.RuntimeBeanReference;
|
||||
@@ -64,6 +63,7 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
@@ -135,9 +135,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider"));
|
||||
}
|
||||
|
||||
try {
|
||||
registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME);
|
||||
} catch (NoSuchBeanDefinitionException ignored) {
|
||||
if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) {
|
||||
|
||||
BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder
|
||||
.genericBeanDefinition(MongoPersistentEntityIndexCreator.class);
|
||||
@@ -151,7 +149,7 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
|
||||
BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext);
|
||||
|
||||
if (validatingMongoEventListener != null) {
|
||||
if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) {
|
||||
parserContext.registerBeanComponent(
|
||||
new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME));
|
||||
}
|
||||
@@ -165,15 +163,16 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) {
|
||||
|
||||
String disableValidation = element.getAttribute("disable-validation");
|
||||
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation);
|
||||
boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation);
|
||||
|
||||
if (!validationDisabled) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition();
|
||||
RuntimeBeanReference validator = getValidator(builder, parserContext);
|
||||
RuntimeBeanReference validator = getValidator(element, parserContext);
|
||||
|
||||
if (validator != null) {
|
||||
builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class);
|
||||
builder.getRawBeanDefinition().setSource(element);
|
||||
builder.addConstructorArgValue(validator);
|
||||
|
||||
return builder.getBeanDefinition();
|
||||
@@ -195,7 +194,6 @@ public class MappingMongoConverterParser implements BeanDefinitionParser {
|
||||
validatorDef.setSource(source);
|
||||
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
|
||||
String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef);
|
||||
parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName));
|
||||
|
||||
return new RuntimeBeanReference(validatorName);
|
||||
}
|
||||
|
||||
@@ -22,9 +22,12 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -112,6 +115,20 @@ abstract class MongoParsingUtils {
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// ServerAPI
|
||||
if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) {
|
||||
|
||||
MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
|
||||
serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version"));
|
||||
try {
|
||||
clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject());
|
||||
} catch (Exception exception) {
|
||||
throw new BeanDefinitionValidationException("Non parsable server-api.", exception);
|
||||
}
|
||||
} else {
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi");
|
||||
}
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
@@ -21,8 +21,8 @@ import java.net.UnknownHostException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -43,8 +43,8 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
* A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
|
||||
*/
|
||||
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'. Check your replica set configuration!";
|
||||
private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -88,14 +88,18 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
private ServerAddress parseServerAddress(String source) {
|
||||
|
||||
if (!StringUtils.hasText(source)) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
String[] hostAndPort = extractHostAddressAndPort(source.trim());
|
||||
|
||||
if (hostAndPort.length > 2) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -105,9 +109,13 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
|
||||
} catch (UnknownHostException e) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]));
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]));
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -17,8 +17,11 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -42,6 +45,7 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -54,17 +58,19 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,7 +84,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -88,7 +94,21 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,7 +119,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -110,7 +130,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,7 +141,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -132,7 +152,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -252,7 +272,20 @@ public class CollectionOptions {
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -303,6 +336,16 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -398,4 +441,89 @@ public class CollectionOptions {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one,
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
|
||||
* Encryption algorithms supported by MongoDB Client Side Field Level Encryption.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public final class EncryptionAlgorithms {
|
||||
|
||||
public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic";
|
||||
public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random";
|
||||
|
||||
}
|
||||
@@ -21,27 +21,45 @@ import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.TimeSeries;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.EntityProjectionIntrospector;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CreateCollectionOptions;
|
||||
import com.mongodb.client.model.TimeSeriesGranularity;
|
||||
import com.mongodb.client.model.ValidationOptions;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
@@ -58,9 +76,31 @@ class EntityOperations {
|
||||
private static final String ID_FIELD = "_id";
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
private final QueryMapper queryMapper;
|
||||
|
||||
EntityOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
private final EntityProjectionIntrospector introspector;
|
||||
|
||||
private final MongoJsonSchemaMapper schemaMapper;
|
||||
|
||||
EntityOperations(MongoConverter converter) {
|
||||
this(converter, new QueryMapper(converter));
|
||||
}
|
||||
|
||||
EntityOperations(MongoConverter converter, QueryMapper queryMapper) {
|
||||
this(converter, converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory(),
|
||||
queryMapper);
|
||||
}
|
||||
|
||||
EntityOperations(MongoConverter converter,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
CustomConversions conversions, ProjectionFactory projectionFactory, QueryMapper queryMapper) {
|
||||
this.context = context;
|
||||
this.queryMapper = queryMapper;
|
||||
this.introspector = EntityProjectionIntrospector.create(projectionFactory,
|
||||
EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy()
|
||||
.and(((target, underlyingType) -> !conversions.isSimpleType(target))),
|
||||
context);
|
||||
this.schemaMapper = new MongoJsonSchemaMapper(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -225,6 +265,89 @@ class EntityOperations {
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned
|
||||
* type is a projection and what property paths are participating in the projection.
|
||||
*
|
||||
* @param resultType the type to project on. Must not be {@literal null}.
|
||||
* @param entityType the source domain type. Must not be {@literal null}.
|
||||
* @return the introspection result.
|
||||
* @since 3.4
|
||||
* @see EntityProjectionIntrospector#introspect(Class, Class)
|
||||
*/
|
||||
public <M, D> EntityProjection<M, D> introspectProjection(Class<M> resultType, Class<D> entityType) {
|
||||
return introspector.introspect(resultType, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert given {@link CollectionOptions} to a document and take the domain type information into account when
|
||||
* creating a mapped schema for validation.
|
||||
*
|
||||
* @param collectionOptions can be {@literal null}.
|
||||
* @param entityType must not be {@literal null}. Use {@link Object} type instead.
|
||||
* @return the converted {@link CreateCollectionOptions}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions,
|
||||
Class<?> entityType) {
|
||||
|
||||
Optional<Collation> collation = Optionals.firstNonEmpty(
|
||||
() -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation),
|
||||
() -> forType(entityType).getCollation());//
|
||||
|
||||
CreateCollectionOptions result = new CreateCollectionOptions();
|
||||
collation.map(Collation::toMongoCollation).ifPresent(result::collation);
|
||||
|
||||
if (collectionOptions == null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
collectionOptions.getCapped().ifPresent(result::capped);
|
||||
collectionOptions.getSize().ifPresent(result::sizeInBytes);
|
||||
collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments);
|
||||
collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation);
|
||||
|
||||
collectionOptions.getValidationOptions().ifPresent(it -> {
|
||||
|
||||
ValidationOptions validationOptions = new ValidationOptions();
|
||||
|
||||
it.getValidationAction().ifPresent(validationOptions::validationAction);
|
||||
it.getValidationLevel().ifPresent(validationOptions::validationLevel);
|
||||
|
||||
it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType)));
|
||||
|
||||
result.validationOptions(validationOptions);
|
||||
});
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> {
|
||||
|
||||
com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(
|
||||
it.getTimeField());
|
||||
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
options.metaField(it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase()));
|
||||
}
|
||||
|
||||
result.timeSeriesOptions(options);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private Document getMappedValidator(Validator validator, Class<?> domainType) {
|
||||
|
||||
Document validationRules = validator.toDocument();
|
||||
|
||||
if (validationRules.containsKey("$jsonSchema")) {
|
||||
return schemaMapper.mapSchema(validationRules, domainType);
|
||||
}
|
||||
|
||||
return queryMapper.getMappedObject(validationRules, context.getPersistentEntity(domainType));
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -778,6 +901,24 @@ class EntityOperations {
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
|
||||
/**
|
||||
* Derive the applicable {@link CollectionOptions} for the given type.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
CollectionOptions getCollectionOptions();
|
||||
|
||||
/**
|
||||
* Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially
|
||||
* annotated field names.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -817,6 +958,16 @@ class EntityOperations {
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
return CollectionOptions.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) {
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -854,6 +1005,58 @@ class EntityOperations {
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
|
||||
CollectionOptions collectionOptions = CollectionOptions.empty();
|
||||
if (entity.hasCollation()) {
|
||||
collectionOptions = collectionOptions.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
if (entity.isAnnotationPresent(TimeSeries.class)) {
|
||||
|
||||
TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class);
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.timeField()) == null) {
|
||||
throw new MappingException(String.format("Time series field '%s' does not exist in type %s",
|
||||
timeSeries.timeField(), entity.getName()));
|
||||
}
|
||||
|
||||
TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField());
|
||||
if (StringUtils.hasText(timeSeries.metaField())) {
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.metaField()) == null) {
|
||||
throw new MappingException(
|
||||
String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName()));
|
||||
}
|
||||
|
||||
options = options.metaField(timeSeries.metaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(timeSeries.granularity())) {
|
||||
options = options.granularity(timeSeries.granularity());
|
||||
}
|
||||
collectionOptions = collectionOptions.timeSeries(options);
|
||||
}
|
||||
|
||||
return collectionOptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) {
|
||||
|
||||
TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField()));
|
||||
|
||||
if (StringUtils.hasText(source.getMetaField())) {
|
||||
target = target.metaField(mappedNameOrDefault(source.getMetaField()));
|
||||
}
|
||||
return target.granularity(source.getGranularity());
|
||||
}
|
||||
|
||||
private String mappedNameOrDefault(String name) {
|
||||
MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
|
||||
return persistentProperty != null ? persistentProperty.getFieldName() : name;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -115,6 +115,10 @@ abstract class IndexConverters {
|
||||
ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class)));
|
||||
}
|
||||
|
||||
if (indexOptions.containsKey("wildcardProjection")) {
|
||||
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
|
||||
}
|
||||
|
||||
return ops;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -20,13 +20,19 @@ import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
@@ -34,10 +40,13 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
@@ -52,6 +61,8 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Predicate<JsonSchemaPropertyContext> filter;
|
||||
private final LinkedMultiValueMap<String, Class<?>> mergeProperties;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
@@ -61,10 +72,47 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
this(converter, (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter.getMappingContext(),
|
||||
(property) -> true, new LinkedMultiValueMap<>());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter,
|
||||
MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
Predicate<JsonSchemaPropertyContext> filter, LinkedMultiValueMap<String, Class<?>> mergeProperties) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
this.mappingContext = mappingContext;
|
||||
this.filter = filter;
|
||||
this.mergeProperties = mergeProperties;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter) {
|
||||
return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, mergeProperties);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PropertySpecifier property(String path) {
|
||||
return types -> withTypesFor(path, types);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify additional types to be considered wehen rendering the schema for the given path.
|
||||
*
|
||||
* @param path path the path using {@literal dot '.'} notation.
|
||||
* @param types must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public MongoJsonSchemaCreator withTypesFor(String path, Class<?>... types) {
|
||||
|
||||
LinkedMultiValueMap<String, Class<?>> clone = mergeProperties.clone();
|
||||
for (Class<?> type : types) {
|
||||
clone.add(path, type);
|
||||
}
|
||||
return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -77,11 +125,29 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
{
|
||||
Encrypted encrypted = entity.findAnnotation(Encrypted.class);
|
||||
if (encrypted != null) {
|
||||
|
||||
Document encryptionMetadata = new Document();
|
||||
|
||||
Collection<Object> encryptionKeyIds = entity.getEncryptionKeyIds();
|
||||
if (!CollectionUtils.isEmpty(encryptionKeyIds)) {
|
||||
encryptionMetadata.append("keyId", encryptionKeyIds);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
encryptionMetadata.append("algorithm", encrypted.algorithm());
|
||||
}
|
||||
|
||||
schemaBuilder.encryptionMetadata(encryptionMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -93,6 +159,14 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
String stringPath = currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining("."));
|
||||
stringPath = StringUtils.hasText(stringPath) ? (stringPath + "." + nested.getName()) : nested.getName();
|
||||
if (!filter.test(new PropertyContext(stringPath, nested))) {
|
||||
if (!mergeProperties.containsKey(stringPath)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
@@ -108,27 +182,114 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private JsonSchemaProperty computeSchemaForProperty(List<MongoPersistentProperty> path) {
|
||||
|
||||
String stringPath = path.stream().map(MongoPersistentProperty::getName).collect(Collectors.joining("."));
|
||||
MongoPersistentProperty property = CollectionUtils.lastElement(path);
|
||||
|
||||
boolean required = isRequiredProperty(property);
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
if (!isCollection(property) && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
if (property.isEntity() || mergeProperties.containsKey(stringPath)) {
|
||||
List<JsonSchemaProperty> targetProperties = new ArrayList<>();
|
||||
|
||||
if (property.isEntity()) {
|
||||
targetProperties.add(createObjectSchemaPropertyForEntity(path, property, required));
|
||||
}
|
||||
if (mergeProperties.containsKey(stringPath)) {
|
||||
for (Class<?> theType : mergeProperties.get(stringPath)) {
|
||||
|
||||
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName());
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(path,
|
||||
mappingContext.getRequiredPersistentEntity(theType));
|
||||
|
||||
targetProperties.add(createPotentiallyRequiredSchemaProperty(
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required));
|
||||
}
|
||||
}
|
||||
return targetProperties.size() == 1 ? targetProperties.iterator().next()
|
||||
: JsonSchemaProperty.merged(targetProperties);
|
||||
}
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
JsonSchemaProperty schemaProperty;
|
||||
if (isCollection(property)) {
|
||||
schemaProperty = createArraySchemaProperty(fieldName, property, required);
|
||||
} else if (property.isMap()) {
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
schemaProperty = createEnumSchemaProperty(fieldName, targetType, required);
|
||||
} else {
|
||||
schemaProperty = createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
return applyEncryptionDataIfNecessary(property, schemaProperty);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property,
|
||||
boolean required) {
|
||||
|
||||
ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName);
|
||||
|
||||
if (isSpecificType(property)) {
|
||||
schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(schemaProperty, required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property,
|
||||
ArrayJsonSchemaProperty schemaProperty) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getPersistentEntity(property.getTypeInformation().getRequiredComponentType());
|
||||
|
||||
if (persistentEntity != null) {
|
||||
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity);
|
||||
|
||||
if (nestedProperties.isEmpty()) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
return schemaProperty
|
||||
.items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0])));
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignable(Enum.class, property.getActualType())) {
|
||||
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) property.getActualType());
|
||||
|
||||
return schemaProperty
|
||||
.items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues));
|
||||
}
|
||||
|
||||
return schemaProperty.items(JsonSchemaObject.of(property.getActualType()));
|
||||
}
|
||||
|
||||
private boolean isSpecificType(MongoPersistentProperty property) {
|
||||
return !ClassTypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
|
||||
}
|
||||
|
||||
private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property,
|
||||
JsonSchemaProperty schemaProperty) {
|
||||
|
||||
Encrypted encrypted = property.findAnnotation(Encrypted.class);
|
||||
if (encrypted == null) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty);
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
enc = enc.algorithm(encrypted.algorithm());
|
||||
}
|
||||
if (!ObjectUtils.isEmpty(encrypted.keyId())) {
|
||||
enc = enc.keys(property.getEncryptionKeyIds());
|
||||
}
|
||||
return enc;
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -142,15 +303,12 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) targetType);
|
||||
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
targetType = computeTargetType(targetType, possibleValues);
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
@@ -161,14 +319,20 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues);
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
return schemaObject;
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
@@ -199,12 +363,53 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
private static Class<?> computeTargetType(Class<?> fallback, List<Object> possibleValues) {
|
||||
return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass();
|
||||
}
|
||||
|
||||
if (!required) {
|
||||
private <E extends Enum<E>> List<Object> getPossibleEnumValues(Class<E> targetType) {
|
||||
|
||||
EnumSet<E> enumSet = EnumSet.allOf(targetType);
|
||||
List<Object> possibleValues = new ArrayList<>(enumSet.size());
|
||||
|
||||
for (Object enumValue : enumSet) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
return possibleValues;
|
||||
}
|
||||
|
||||
private static boolean isCollection(MongoPersistentProperty property) {
|
||||
return property.isCollectionLike() && !property.getType().equals(byte[].class);
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
return required ? JsonSchemaProperty.required(property) : property;
|
||||
}
|
||||
|
||||
class PropertyContext implements JsonSchemaPropertyContext {
|
||||
|
||||
private final String path;
|
||||
private final MongoPersistentProperty property;
|
||||
|
||||
public PropertyContext(String path, MongoPersistentProperty property) {
|
||||
this.path = path;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
return property;
|
||||
}
|
||||
|
||||
return JsonSchemaProperty.required(property);
|
||||
@Override
|
||||
public <T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property) {
|
||||
return (MongoPersistentEntity<T>) mappingContext.getPersistentEntity(property);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,6 +36,7 @@ import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
@@ -113,6 +114,7 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
private @Nullable ServerApi serverApi;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
@@ -395,6 +397,15 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverApi can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#serverApi(ServerApi)
|
||||
* @since 3.3
|
||||
*/
|
||||
public void setServerApi(@Nullable ServerApi serverApi) {
|
||||
this.serverApi = serverApi;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
@@ -476,9 +487,11 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
|
||||
if (uUidRepresentation != null) {
|
||||
builder.uuidRepresentation(uUidRepresentation);
|
||||
builder = builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
if (serverApi != null) {
|
||||
builder = builder.serverApi(serverApi);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
|
||||
@@ -15,7 +15,24 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -46,6 +63,7 @@ import org.springframework.util.Assert;
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
* {@link Encrypted} properties will contain {@literal encrypt} information.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -60,6 +78,111 @@ public interface MongoJsonSchemaCreator {
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Create a merged {@link MongoJsonSchema} out of the individual schemas of the given types by merging their
|
||||
* properties into one large {@link MongoJsonSchema schema}.
|
||||
*
|
||||
* @param types must not be {@literal null} nor contain {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchema}.
|
||||
* @since 3.4
|
||||
*/
|
||||
default MongoJsonSchema mergedSchemaFor(Class<?>... types) {
|
||||
|
||||
MongoJsonSchema[] schemas = Arrays.stream(types).map(this::createSchemaFor).toArray(MongoJsonSchema[]::new);
|
||||
return MongoJsonSchema.merge(schemas);
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter matching {@link JsonSchemaProperty properties}.
|
||||
*
|
||||
* @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter);
|
||||
|
||||
/**
|
||||
* Entry point to specify additional behavior for a given path.
|
||||
*
|
||||
* @param path the path using {@literal dot '.'} notation.
|
||||
* @return new instance of {@link PropertySpecifier}.
|
||||
* @since 3.4
|
||||
*/
|
||||
PropertySpecifier property(String path);
|
||||
|
||||
/**
|
||||
* The context in which a specific {@link #getProperty()} is encountered during schema creation.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
interface JsonSchemaPropertyContext {
|
||||
|
||||
/**
|
||||
* The path to a given field/property in dot notation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getPath();
|
||||
|
||||
/**
|
||||
* The current property.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoPersistentProperty getProperty();
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoPersistentEntity} for a given property.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return {@literal null} if the property is not an entity. It is nevertheless recommend to check
|
||||
* {@link PersistentProperty#isEntity()} first.
|
||||
*/
|
||||
@Nullable
|
||||
<T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones.
|
||||
*
|
||||
* @return new instance of {@link Predicate}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static Predicate<JsonSchemaPropertyContext> encryptedOnly() {
|
||||
|
||||
return new Predicate<JsonSchemaPropertyContext>() {
|
||||
|
||||
// cycle guard
|
||||
private final Set<MongoPersistentProperty> seen = new HashSet<>();
|
||||
|
||||
@Override
|
||||
public boolean test(JsonSchemaPropertyContext context) {
|
||||
return extracted(context.getProperty(), context);
|
||||
}
|
||||
|
||||
private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) {
|
||||
if (property.isAnnotationPresent(Encrypted.class)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!property.isEntity() || seen.contains(property)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
seen.add(property);
|
||||
|
||||
for (MongoPersistentProperty nested : context.resolveEntity(property)) {
|
||||
if (extracted(nested, context)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
|
||||
* {@link MongoConverter}.
|
||||
@@ -72,4 +195,56 @@ public interface MongoJsonSchemaCreator {
|
||||
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
|
||||
return new MappingMongoJsonSchemaCreator(mongoConverter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential
|
||||
* {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static MongoJsonSchemaCreator create(MappingContext mappingContext) {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
return create(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We
|
||||
* recommend to use {@link #create(MappingContext)}.
|
||||
*
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static MongoJsonSchemaCreator create() {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
|
||||
mappingContext.afterPropertiesSet();
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
return create(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
interface PropertySpecifier {
|
||||
|
||||
/**
|
||||
* Set additional type parameters for polymorphic ones.
|
||||
*
|
||||
* @param types must not be {@literal null}.
|
||||
* @return the source
|
||||
*/
|
||||
MongoJsonSchemaCreator withTypes(Class<?>... types);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,8 +57,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
/**
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <br />
|
||||
* proxy). <br />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -84,7 +83,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
String getCollectionName(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Execute the a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to
|
||||
* Execute a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to
|
||||
* obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be
|
||||
* converted into Spring's DAO exception hierarchy.
|
||||
*
|
||||
@@ -124,8 +123,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch);
|
||||
|
||||
/**
|
||||
* Executes a {@link DbCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* Executes a {@link DbCallback} translating any exceptions as necessary. <br />
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -137,8 +135,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T execute(DbCallback<T> action);
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* Executes the given {@link CollectionCallback} on the entity collection of the specified class. <br />
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -150,8 +147,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T execute(Class<?> entityClass, CollectionCallback<T> action);
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* Executes the given {@link CollectionCallback} on the collection of the given name. <br />
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -175,8 +171,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB. <br />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
@@ -211,8 +206,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <br />
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}. <br />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -299,8 +293,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Get a {@link MongoCollection} by its name. The returned collection may not exists yet (except in local memory) and
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* exists} first. <br />
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -309,8 +302,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
MongoCollection<Document> getCollection(String collectionName);
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* Check to see if a collection with a name indicated by the entity class exists. <br />
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -319,8 +311,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> boolean collectionExists(Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* Check to see if a collection with a given name exists. <br />
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -329,8 +320,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
boolean collectionExists(String collectionName);
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* Drop the collection with the name indicated by the entity class. <br />
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -338,8 +328,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> void dropCollection(Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* Drop the collection with the given name. <br />
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -402,11 +391,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
BulkOperations bulkOps(BulkMode mode, @Nullable Class<?> entityType, String collectionName);
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* Query for a list of objects of type T from the collection used by the entity class. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -416,11 +403,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> List<T> findAll(Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* Query for a list of objects of type T from the specified collection. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -538,12 +523,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, String collectionName, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
|
||||
* of the inputCollection is derived from the inputType of the aggregation.
|
||||
* <br />
|
||||
* of the inputCollection is derived from the inputType of the aggregation. <br />
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -556,11 +539,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(TypedAggregation<?> aggregation, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* needs to be closed. The raw results will be mapped to the given entity class. <br />
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -575,11 +556,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<O> CloseableIterator<O> aggregateStream(Aggregation aggregation, Class<?> inputType, Class<O> outputType);
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. <br />
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* needs to be closed. The raw results will be mapped to the given entity class. <br />
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -601,7 +580,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* @param reduceFunction The JavaScript reduce function
|
||||
* @param entityClass The parametrized type of the returned list. Must not be {@literal null}.
|
||||
* @return The results of the map reduce operation
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
Class<T> entityClass);
|
||||
|
||||
@@ -614,7 +595,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* @param mapReduceOptions Options that specify detailed map-reduce behavior.
|
||||
* @param entityClass The parametrized type of the returned list. Must not be {@literal null}.
|
||||
* @return The results of the map reduce operation
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> MapReduceResults<T> mapReduce(String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
@Nullable MapReduceOptions mapReduceOptions, Class<T> entityClass);
|
||||
|
||||
@@ -628,7 +611,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* @param reduceFunction The JavaScript reduce function
|
||||
* @param entityClass The parametrized type of the returned list. Must not be {@literal null}.
|
||||
* @return The results of the map reduce operation
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
Class<T> entityClass);
|
||||
|
||||
@@ -642,7 +627,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* @param mapReduceOptions Options that specify detailed map-reduce behavior
|
||||
* @param entityClass The parametrized type of the returned list. Must not be {@literal null}.
|
||||
* @return The results of the map reduce operation
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> MapReduceResults<T> mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction,
|
||||
@Nullable MapReduceOptions mapReduceOptions, Class<T> entityClass);
|
||||
|
||||
@@ -701,11 +688,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* specified type. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -719,11 +704,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* type. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -767,11 +750,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
boolean exists(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
|
||||
* <br />
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -783,11 +764,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> List<T> find(Query query, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
|
||||
* <br />
|
||||
* Map the results of an ad-hoc query on the specified collection to a List of the specified type. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1119,10 +1098,8 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
* database.
|
||||
* <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* database. <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1139,8 +1116,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* type. The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1159,18 +1135,17 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the count of matching documents.
|
||||
* @see #exactCount(Query, Class)
|
||||
* @see #estimatedCount(Class)
|
||||
*/
|
||||
long count(Query query, Class<?> entityClass);
|
||||
|
||||
@@ -1181,25 +1156,44 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #count(Query, Class, String)
|
||||
* @see #exactCount(Query, String)
|
||||
* @see #estimatedCount(String)
|
||||
*/
|
||||
long count(Query query, String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass the parametrized type. Can be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #count(Query, Class, String)
|
||||
* @see #estimatedCount(String)
|
||||
*/
|
||||
long count(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <br />
|
||||
* based on collection statistics. <br />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1214,8 +1208,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <br />
|
||||
* Estimate the number of documents in the given collection based on collection statistics. <br />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1225,14 +1218,60 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*/
|
||||
long estimatedCount(String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the count of matching documents.
|
||||
* @since 3.4
|
||||
*/
|
||||
default long exactCount(Query query, Class<?> entityClass) {
|
||||
return exactCount(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #count(Query, Class, String)
|
||||
* @since 3.4
|
||||
*/
|
||||
default long exactCount(Query query, String collectionName) {
|
||||
return exactCount(query, null, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -1244,20 +1283,18 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* @param entityClass the parametrized type. Can be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @since 3.4
|
||||
*/
|
||||
long count(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
long exactCount(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* Insert the object into the collection for the entity type of the object to save. <br />
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. <br />
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* Type Conversion"</a> for more details. <br />
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
@@ -1269,11 +1306,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T insert(T objectToSave);
|
||||
|
||||
/**
|
||||
* Insert the object into the specified collection.
|
||||
* <br />
|
||||
* Insert the object into the specified collection. <br />
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
@@ -1314,17 +1349,14 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* object is not already present, that is an 'upsert'. <br />
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* Type Conversion"</a> for more details. <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1335,16 +1367,14 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* is an 'upsert'. <br />
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used. <br />
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* <br />
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion</a> for more details. <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.ServerApi.Builder;
|
||||
import com.mongodb.ServerApiVersion;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoServerApiFactoryBean implements FactoryBean<ServerApi> {
|
||||
|
||||
private String version;
|
||||
private @Nullable Boolean deprecationErrors;
|
||||
private @Nullable Boolean strict;
|
||||
|
||||
/**
|
||||
* @param version the version string either as the enum name or the server version value.
|
||||
* @see ServerApiVersion
|
||||
*/
|
||||
public void setVersion(String version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param deprecationErrors
|
||||
* @see ServerApi.Builder#deprecationErrors(boolean)
|
||||
*/
|
||||
public void setDeprecationErrors(@Nullable Boolean deprecationErrors) {
|
||||
this.deprecationErrors = deprecationErrors;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param strict
|
||||
* @see ServerApi.Builder#strict(boolean)
|
||||
*/
|
||||
public void setStrict(@Nullable Boolean strict) {
|
||||
this.strict = strict;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public ServerApi getObject() throws Exception {
|
||||
|
||||
Builder builder = ServerApi.builder().version(version());
|
||||
|
||||
if (deprecationErrors != null) {
|
||||
builder = builder.deprecationErrors(deprecationErrors);
|
||||
}
|
||||
if (strict != null) {
|
||||
builder = builder.strict(strict);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return ServerApi.class;
|
||||
}
|
||||
|
||||
private ServerApiVersion version() {
|
||||
try {
|
||||
// lookup by name eg. 'V1'
|
||||
return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// or just the version number, eg. just '1'
|
||||
return ServerApiVersion.findByValue(version);
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,18 +16,19 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.ProjectionInformation;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.util.Predicates;
|
||||
|
||||
/**
|
||||
* Common operations performed on properties of an entity like extracting fields information for projection creation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
class PropertyOperations {
|
||||
@@ -40,37 +41,37 @@ class PropertyOperations {
|
||||
|
||||
/**
|
||||
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
|
||||
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
|
||||
* creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a
|
||||
* {@literal closed interface projection}.
|
||||
*
|
||||
* @param projectionFactory must not be {@literal null}.
|
||||
* @param projection must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @return {@link Document} with fields to be included.
|
||||
*/
|
||||
Document computeFieldsForProjection(ProjectionFactory projectionFactory, Document fields, Class<?> domainType,
|
||||
Class<?> targetType) {
|
||||
Document computeMappedFieldsForProjection(EntityProjection<?, ?> projection,
|
||||
Document fields) {
|
||||
|
||||
if (!fields.isEmpty() || ClassUtils.isAssignable(domainType, targetType)) {
|
||||
if (!projection.isClosedProjection()) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = new Document();
|
||||
|
||||
if (targetType.isInterface()) {
|
||||
|
||||
ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType);
|
||||
|
||||
if (projectionInformation.isClosed()) {
|
||||
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
|
||||
}
|
||||
if (projection.getMappedType().getType().isInterface()) {
|
||||
projection.forEach(it -> {
|
||||
projectedFields.put(it.getPropertyPath().getSegment(), 1);
|
||||
});
|
||||
} else {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(targetType);
|
||||
if (entity != null) {
|
||||
entity.doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
// DTO projections use merged metadata between domain type and result type
|
||||
PersistentPropertyTranslator translator = PersistentPropertyTranslator.create(
|
||||
mappingContext.getRequiredPersistentEntity(projection.getDomainType()),
|
||||
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName));
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getRequiredPersistentEntity(projection.getMappedType());
|
||||
for (MongoPersistentProperty property : persistentEntity) {
|
||||
projectedFields.put(translator.translate(property).getFieldName(), 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ import java.util.stream.Collectors;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -54,11 +55,10 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
@@ -288,45 +288,59 @@ class QueryOperations {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
|
||||
Document fields = new Document();
|
||||
Document fields = evaluateFields(entity);
|
||||
|
||||
for (Entry<String, Object> entry : query.getFieldsObject().entrySet()) {
|
||||
if (entity == null) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document mappedFields;
|
||||
if (!fields.isEmpty()) {
|
||||
mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
} else {
|
||||
mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields);
|
||||
mappedFields = queryMapper.addMetaAttributes(mappedFields, entity);
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName())
|
||||
&& !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
if (mappedFields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
private Document evaluateFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
Document evaluated = new Document();
|
||||
|
||||
for (Entry<String, Object> entry : fields.entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof MongoExpression) {
|
||||
|
||||
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
||||
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
||||
|
||||
fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
evaluated.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
} else {
|
||||
fields.put(entry.getKey(), entry.getValue());
|
||||
evaluated.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
|
||||
} else {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields,
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
return evaluated;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -388,8 +402,8 @@ class QueryOperations {
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
|
||||
@@ -91,7 +91,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
ReactiveIndexOperations indexOps(Class<?> entityClass);
|
||||
|
||||
/**
|
||||
* Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the
|
||||
* Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the
|
||||
* MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be
|
||||
* converted into Spring's DAO exception hierarchy.
|
||||
*
|
||||
@@ -939,18 +939,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the count of matching documents.
|
||||
* @see #exactCount(Query, Class)
|
||||
* @see #estimatedCount(Class)
|
||||
*/
|
||||
Mono<Long> count(Query query, Class<?> entityClass);
|
||||
|
||||
@@ -961,18 +960,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #count(Query, Class, String)
|
||||
* @see #estimatedCount(String)
|
||||
* @see #exactCount(Query, String)
|
||||
*/
|
||||
Mono<Long> count(Query query, String collectionName);
|
||||
|
||||
@@ -982,19 +980,18 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* count all matches. <br />
|
||||
* This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
* aggregation execution} which may have an impact on performance.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass the parametrized type. Can be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #estimatedCount(String)
|
||||
* @see #exactCount(Query, Class, String)
|
||||
*/
|
||||
Mono<Long> count(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
|
||||
@@ -1027,6 +1024,75 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
*/
|
||||
Mono<Long> estimatedCount(String collectionName);
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the collection of the given entity class.
|
||||
* <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
* @return the count of matching documents.
|
||||
* @since 3.4
|
||||
*/
|
||||
default Mono<Long> exactCount(Query query, Class<?> entityClass) {
|
||||
return exactCount(query, entityClass, getCollectionName(entityClass));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query}
|
||||
* must solely consist of document field references as we lack type information to map potential property references
|
||||
* onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @see #count(Query, Class, String)
|
||||
* @since 3.4
|
||||
*/
|
||||
default Mono<Long> exactCount(Query query, String collectionName) {
|
||||
return exactCount(query, null, collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
* <strong>NOTE:</strong> Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches. <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
* @param entityClass the parametrized type. Can be {@literal null}.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
* @return the count of matching documents.
|
||||
* @since 3.4
|
||||
*/
|
||||
Mono<Long> exactCount(Query query, @Nullable Class<?> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
@@ -1599,7 +1665,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param options additional options like output collection. Must not be {@literal null}.
|
||||
* @return a {@link Flux} emitting the result document sequence. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, Class<T> resultType, String mapFunction,
|
||||
String reduceFunction, MapReduceOptions options);
|
||||
|
||||
@@ -1617,7 +1685,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* @param options additional options like output collection. Must not be {@literal null}.
|
||||
* @return a {@link Flux} emitting the result document sequence. Never {@literal null}.
|
||||
* @since 2.1
|
||||
* @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}.
|
||||
*/
|
||||
@Deprecated
|
||||
<T> Flux<T> mapReduce(Query filterQuery, Class<?> domainType, String inputCollectionName, Class<T> resultType,
|
||||
String mapFunction, String reduceFunction, MapReduceOptions options);
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2016-2022. the original author or authors.
|
||||
* Copyright 2016-2018 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -142,11 +142,118 @@ public class AccumulatorOperators {
|
||||
return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the population covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(String fieldReference) {
|
||||
return covariancePop().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the population covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(AggregationExpression expression) {
|
||||
return covariancePop().and(expression);
|
||||
}
|
||||
|
||||
private CovariancePop covariancePop() {
|
||||
return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(String fieldReference) {
|
||||
return covarianceSamp().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(AggregationExpression expression) {
|
||||
return covarianceSamp().and(expression);
|
||||
}
|
||||
|
||||
private CovarianceSamp covarianceSamp() {
|
||||
return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference)
|
||||
: CovarianceSamp.covarianceSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates
|
||||
* the exponential moving average of numeric values
|
||||
*
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public ExpMovingAvgBuilder expMovingAvg() {
|
||||
|
||||
ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference)
|
||||
: ExpMovingAvg.expMovingAvgOf(expression);
|
||||
return new ExpMovingAvgBuilder() {
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) {
|
||||
return expMovingAvg.n(numberOfHistoricalDocuments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return expMovingAvg.alpha(exponentialDecayValue);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ExpMovingAvg}.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ExpMovingAvgBuilder {
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight.
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments);
|
||||
|
||||
/**
|
||||
* Define the exponential decay value.
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg alpha(double exponentialDecayValue);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $sum}.
|
||||
*
|
||||
@@ -658,4 +765,185 @@ public class AccumulatorOperators {
|
||||
return super.toDocument(value, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covariancePop}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovariancePop extends AbstractAggregationExpression {
|
||||
|
||||
private CovariancePop(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovariancePop(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(AggregationExpression expression) {
|
||||
return new CovariancePop(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(String fieldReference) {
|
||||
return new CovariancePop(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(AggregationExpression expression) {
|
||||
return new CovariancePop(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covariancePop";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covarianceSamp}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovarianceSamp extends AbstractAggregationExpression {
|
||||
|
||||
private CovarianceSamp(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovarianceSamp(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(AggregationExpression expression) {
|
||||
return new CovarianceSamp(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(String fieldReference) {
|
||||
return new CovarianceSamp(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(AggregationExpression expression) {
|
||||
return new CovarianceSamp(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covarianceSamp";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ExpMovingAvg} calculates the exponential moving average of numeric values.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class ExpMovingAvg extends AbstractAggregationExpression {
|
||||
|
||||
private ExpMovingAvg(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(String fieldReference) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value
|
||||
* to be used as input.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight. <br />
|
||||
* Specify either {@link #n(int) N} or {@link #alpha(double) aplha}. Not both!
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) {
|
||||
return new ExpMovingAvg(append("N", numberOfHistoricalDocuments));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the exponential decay value. <br />
|
||||
* Specify either {@link #alpha(double) aplha} or {@link #n(int) N}. Not both!
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return new ExpMovingAvg(append("alpha", exponentialDecayValue));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$expMovingAvg";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,4 +201,5 @@ public class AddFieldsOperation extends DocumentEnhancingOperation {
|
||||
AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -499,6 +499,17 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link MatchOperation}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static MatchOperation match(AggregationExpression expression) {
|
||||
return new MatchOperation(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -35,6 +35,7 @@ import org.springframework.util.Assert;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Shashank Sharma
|
||||
* @author Divya Srivastava
|
||||
* @since 1.0
|
||||
*/
|
||||
public class ArrayOperators {
|
||||
@@ -362,6 +363,38 @@ public class ArrayOperators {
|
||||
|
||||
return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that return the first element in the associated array.
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @return new instance of {@link First}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public First first() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return First.firstOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? First.firstOf(expression) : First.first(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that return the last element in the given array.
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @return new instance of {@link Last}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public Last last() {
|
||||
|
||||
if (usesFieldRef()) {
|
||||
return Last.lastOf(fieldReference);
|
||||
}
|
||||
|
||||
return usesExpression() ? Last.lastOf(expression) : Last.last(values);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
@@ -1812,4 +1845,112 @@ public class ArrayOperators {
|
||||
return "$arrayToObject";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $first} that returns the first element in an array. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public static class First extends AbstractAggregationExpression {
|
||||
|
||||
private First(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first element in the given array.
|
||||
*
|
||||
* @param array must not be {@literal null}.
|
||||
* @return new instance of {@link First}.
|
||||
*/
|
||||
public static First first(Object array) {
|
||||
return new First(array);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first element in the array pointed to by the given {@link Field field reference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link First}.
|
||||
*/
|
||||
public static First firstOf(String fieldReference) {
|
||||
return new First(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first element of the array computed by the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link First}.
|
||||
*/
|
||||
public static First firstOf(AggregationExpression expression) {
|
||||
return new First(expression);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AbstractAggregationExpression#getMongoMethod()
|
||||
*/
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$first";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $last} that returns the last element in an array. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public static class Last extends AbstractAggregationExpression {
|
||||
|
||||
private Last(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the last element in the given array.
|
||||
*
|
||||
* @param array must not be {@literal null}.
|
||||
* @return new instance of {@link Last}.
|
||||
*/
|
||||
public static Last last(Object array) {
|
||||
return new Last(array);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the last element in the array pointed to by the given {@link Field field reference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Last}.
|
||||
*/
|
||||
public static Last lastOf(String fieldReference) {
|
||||
return new Last(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the last element of the array computed buy the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Last}.
|
||||
*/
|
||||
public static Last lastOf(AggregationExpression expression) {
|
||||
return new Last(expression);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AbstractAggregationExpression#getMongoMethod()
|
||||
*/
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$last";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -235,7 +236,7 @@ public class ConditionalOperators {
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
*/
|
||||
public static class IfNull implements AggregationExpression {
|
||||
|
||||
@@ -251,7 +252,8 @@ public class ConditionalOperators {
|
||||
/**
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}.
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(String fieldReference) {
|
||||
@@ -264,7 +266,7 @@ public class ConditionalOperators {
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param expression the expression to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(AggregationExpression expression) {
|
||||
@@ -282,19 +284,29 @@ public class ConditionalOperators {
|
||||
|
||||
List<Object> list = new ArrayList<Object>();
|
||||
|
||||
if (condition instanceof Field) {
|
||||
list.add(context.getReference((Field) condition).toString());
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
list.add(((AggregationExpression) condition).toDocument(context));
|
||||
if (condition instanceof Collection) {
|
||||
for (Object val : ((Collection) this.condition)) {
|
||||
list.add(mapCondition(val, context));
|
||||
}
|
||||
} else {
|
||||
list.add(condition);
|
||||
list.add(mapCondition(condition, context));
|
||||
}
|
||||
|
||||
list.add(resolve(value, context));
|
||||
|
||||
return new Document("$ifNull", list);
|
||||
}
|
||||
|
||||
private Object mapCondition(Object condition, AggregationOperationContext context) {
|
||||
|
||||
if (condition instanceof Field) {
|
||||
return context.getReference((Field) condition).toString();
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) condition).toDocument(context);
|
||||
} else {
|
||||
return condition;
|
||||
}
|
||||
}
|
||||
|
||||
private Object resolve(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
@@ -315,28 +327,48 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder ifNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value, field name must not be {@literal null}
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}.
|
||||
*/
|
||||
ThenBuilder ifNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface OrBuilder {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder orIfNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value,
|
||||
* @return the {@link ThenBuilder}.
|
||||
*/
|
||||
ThenBuilder orIfNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public interface ThenBuilder {
|
||||
public interface ThenBuilder extends OrBuilder {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* @return new instance of {@link IfNull}.
|
||||
*/
|
||||
IfNull then(Object value);
|
||||
@@ -361,9 +393,10 @@ public class ConditionalOperators {
|
||||
*/
|
||||
static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder {
|
||||
|
||||
private @Nullable Object condition;
|
||||
private @Nullable List<Object> conditions;
|
||||
|
||||
private IfNullOperatorBuilder() {
|
||||
conditions = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -381,7 +414,7 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(String fieldReference) {
|
||||
|
||||
Assert.hasText(fieldReference, "FieldReference name must not be null or empty!");
|
||||
this.condition = Fields.field(fieldReference);
|
||||
this.conditions.add(Fields.field(fieldReference));
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -392,15 +425,25 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression name must not be null or empty!");
|
||||
this.condition = expression;
|
||||
this.conditions.add(expression);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(String fieldReference) {
|
||||
return ifNull(fieldReference);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(AggregationExpression expression) {
|
||||
return ifNull(expression);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object)
|
||||
*/
|
||||
public IfNull then(Object value) {
|
||||
return new IfNull(condition, value);
|
||||
return new IfNull(conditions, value);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -409,7 +452,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new IfNull(condition, Fields.field(fieldReference));
|
||||
return new IfNull(conditions, Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -418,7 +461,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new IfNull(condition, expression);
|
||||
return new IfNull(conditions, expression);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -458,7 +501,7 @@ public class ConditionalOperators {
|
||||
public static Switch switchCases(List<CaseOperator> conditions) {
|
||||
|
||||
Assert.notNull(conditions, "Conditions must not be null!");
|
||||
return new Switch(Collections.<String, Object>singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
return new Switch(Collections.<String, Object> singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -545,7 +588,7 @@ public class ConditionalOperators {
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
*/
|
||||
public static class Cond implements AggregationExpression {
|
||||
|
||||
@@ -806,8 +849,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link OtherwiseBuilder}
|
||||
*/
|
||||
OtherwiseBuilder then(Object value);
|
||||
@@ -832,8 +875,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link Cond}
|
||||
*/
|
||||
Cond otherwise(Object value);
|
||||
@@ -861,8 +904,7 @@ public class ConditionalOperators {
|
||||
private @Nullable Object condition;
|
||||
private @Nullable Object thenValue;
|
||||
|
||||
private ConditionalExpressionBuilder() {
|
||||
}
|
||||
private ConditionalExpressionBuilder() {}
|
||||
|
||||
/**
|
||||
* Creates a new builder for {@link Cond}.
|
||||
|
||||
@@ -231,6 +231,17 @@ public class ConvertOperators {
|
||||
return ToString.toString(valueObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to
|
||||
* radians.
|
||||
*
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public DegreesToRadians convertDegreesToRadians() {
|
||||
return DegreesToRadians.degreesToRadians(valueObject());
|
||||
}
|
||||
|
||||
private Convert createConvert() {
|
||||
return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression);
|
||||
}
|
||||
@@ -692,4 +703,52 @@ public class ConvertOperators {
|
||||
return "$toString";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DegreesToRadians extends AbstractAggregationExpression {
|
||||
|
||||
private DegreesToRadians(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians.
|
||||
*
|
||||
* @param fieldName must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(String fieldName) {
|
||||
return degreesToRadians(Fields.field(fieldName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) {
|
||||
return degreesToRadians(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadians(Object value) {
|
||||
return new DegreesToRadians(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$degreesToRadians";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,9 +15,16 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -45,6 +52,19 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date referenced by given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateOperatorFactory}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new DateOperatorFactory(fieldReference).withTimezone(timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
@@ -57,6 +77,19 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateOperatorFactory}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new DateOperatorFactory(expression).withTimezone(timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the given value as date.
|
||||
* <br />
|
||||
@@ -130,6 +163,7 @@ public class DateOperators {
|
||||
* <strong>NOTE: </strong>Support for timezones in aggregations Requires MongoDB 3.6 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class Timezone {
|
||||
@@ -156,7 +190,7 @@ public class DateOperators {
|
||||
* representing an Olson Timezone Identifier or UTC Offset.
|
||||
*
|
||||
* @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an
|
||||
* {@link AggregationExpression} resulting in the timezone.
|
||||
* {@link AggregationExpression} resulting in the timezone.
|
||||
* @return new instance of {@link Timezone}.
|
||||
*/
|
||||
public static Timezone valueOf(Object value) {
|
||||
@@ -165,6 +199,61 @@ public class DateOperators {
|
||||
return new Timezone(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param timeZone {@link TimeZone} rendering the offset as UTC offset.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromOffset(TimeZone timeZone) {
|
||||
|
||||
Assert.notNull(timeZone, "TimeZone must not be null!");
|
||||
|
||||
return fromOffset(
|
||||
ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset()))));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param offset {@link ZoneOffset} rendering the offset as UTC offset.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromOffset(ZoneOffset offset) {
|
||||
|
||||
Assert.notNull(offset, "ZoneOffset must not be null!");
|
||||
return new Timezone(offset.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param timeZone {@link Timezone} rendering the offset as zone identifier.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromZone(TimeZone timeZone) {
|
||||
|
||||
Assert.notNull(timeZone, "TimeZone must not be null!");
|
||||
|
||||
return valueOf(timeZone.getID());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link java.time.ZoneId} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param zoneId {@link ZoneId} rendering the offset as zone identifier.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromZone(ZoneId zoneId) {
|
||||
|
||||
Assert.notNull(zoneId, "ZoneId must not be null!");
|
||||
return new Timezone(zoneId.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset.
|
||||
*
|
||||
@@ -185,6 +274,11 @@ public class DateOperators {
|
||||
public static Timezone ofExpression(AggregationExpression expression) {
|
||||
return valueOf(expression);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
Object getValue() {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -274,6 +368,89 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(fieldReference, expression, dateValue, timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression
|
||||
* expression} (in {@literal units}).
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(AggregationExpression expression, String unit) {
|
||||
return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression
|
||||
* expression} (in {@literal units}).
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in
|
||||
* {@literal units}).
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(String fieldReference, String unit) {
|
||||
return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in
|
||||
* {@literal units}).
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(String fieldReference, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the given value (in {@literal units}).
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.3 new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd add(Object value, String unit) {
|
||||
return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the given value (in {@literal units}).
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.3 new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd add(Object value, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and
|
||||
* 366.
|
||||
@@ -304,6 +481,90 @@ public class DateOperators {
|
||||
return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date
|
||||
* computed by the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(AggregationExpression expression, String unit) {
|
||||
return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date
|
||||
* computed by the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored
|
||||
* at the given {@literal field}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(String fieldReference, String unit) {
|
||||
return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored
|
||||
* at the given {@literal field}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given
|
||||
* {@literal value}.
|
||||
*
|
||||
* @param value anything the resolves to a valid date. Must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diff(Object value, String unit) {
|
||||
return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given
|
||||
* {@literal value}.
|
||||
*
|
||||
* @param value anything the resolves to a valid date. Must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diff(Object value, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that returns the year portion of a date.
|
||||
*
|
||||
@@ -1480,7 +1741,6 @@ public class DateOperators {
|
||||
} else {
|
||||
clone.put("timezone", ((Timezone) value).value);
|
||||
}
|
||||
|
||||
} else {
|
||||
clone.put(key, value);
|
||||
}
|
||||
@@ -1911,7 +2171,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts<DateFromParts> {
|
||||
@@ -2086,7 +2346,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class IsoDateFromParts extends TimezonedDateAggregationExpression
|
||||
@@ -2262,7 +2522,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateToParts extends TimezonedDateAggregationExpression {
|
||||
@@ -2343,7 +2603,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateFromString extends TimezonedDateAggregationExpression {
|
||||
@@ -2418,6 +2678,290 @@ public class DateOperators {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $dateAdd}.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 5.0 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DateAdd extends TimezonedDateAggregationExpression {
|
||||
|
||||
private DateAdd(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a
|
||||
* {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValueOf(AggregationExpression expression, String unit) {
|
||||
return addValue(expression, unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValueOf(String fieldReference, String unit) {
|
||||
return addValue(Fields.field(fieldReference), unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValue(Object value, String unit) {
|
||||
|
||||
Map<String, Object> args = new HashMap<>();
|
||||
args.put("unit", unit);
|
||||
args.put("amount", value);
|
||||
return new DateAdd(args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDateOf(AggregationExpression expression) {
|
||||
return toDate(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDateOf(String fieldReference) {
|
||||
return toDate(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDate(Object dateExpression) {
|
||||
return new DateAdd(append("startDate", dateExpression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
|
||||
*
|
||||
* @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd withTimezone(Timezone timezone) {
|
||||
return new DateAdd(appendTimezone(argumentMap(), timezone));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$dateAdd";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $dateDiff}.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 5.0 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DateDiff extends TimezonedDateAggregationExpression {
|
||||
|
||||
private DateDiff(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a
|
||||
* {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValueOf(AggregationExpression expression, String unit) {
|
||||
return diffValue(expression, unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValueOf(String fieldReference, String unit) {
|
||||
return diffValue(Fields.field(fieldReference), unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValue(Object value, String unit) {
|
||||
|
||||
Map<String, Object> args = new HashMap<>();
|
||||
args.put("unit", unit);
|
||||
args.put("endDate", value);
|
||||
return new DateDiff(args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDateOf(AggregationExpression expression) {
|
||||
return toDate(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDateOf(String fieldReference) {
|
||||
return toDate(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDate(Object dateExpression) {
|
||||
return new DateDiff(append("startDate", dateExpression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
|
||||
*
|
||||
* @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff withTimezone(Timezone timezone) {
|
||||
return new DateDiff(appendTimezone(argumentMap(), timezone));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the start day of the week if the unit if measure is set to {@literal week}. Uses {@literal Sunday} by
|
||||
* default.
|
||||
*
|
||||
* @param day must not be {@literal null}.
|
||||
* @return new instance of {@link DateDiff}.
|
||||
*/
|
||||
public DateDiff startOfWeek(Object day) {
|
||||
return new DateDiff(append("startOfWeek", day));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$dateDiff";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface defining a temporal unit for date operators.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface TemporalUnit {
|
||||
|
||||
String name();
|
||||
|
||||
/**
|
||||
* Converts the given time unit into a {@link TemporalUnit}. Supported units are: days, hours, minutes, seconds, and
|
||||
* milliseconds.
|
||||
*
|
||||
* @param timeUnit the time unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static TemporalUnit from(TimeUnit timeUnit) {
|
||||
|
||||
Assert.notNull(timeUnit, "TimeUnit must not be null");
|
||||
|
||||
switch (timeUnit) {
|
||||
case DAYS:
|
||||
return TemporalUnits.DAY;
|
||||
case HOURS:
|
||||
return TemporalUnits.HOUR;
|
||||
case MINUTES:
|
||||
return TemporalUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return TemporalUnits.SECOND;
|
||||
case MILLISECONDS:
|
||||
return TemporalUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days,
|
||||
* hours, minutes, seconds, and millis.
|
||||
*
|
||||
* @param chronoUnit the chrono unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static TemporalUnit from(ChronoUnit chronoUnit) {
|
||||
|
||||
switch (chronoUnit) {
|
||||
case YEARS:
|
||||
return TemporalUnits.YEAR;
|
||||
case WEEKS:
|
||||
return TemporalUnits.WEEK;
|
||||
case MONTHS:
|
||||
return TemporalUnits.MONTH;
|
||||
case DAYS:
|
||||
return TemporalUnits.DAY;
|
||||
case HOURS:
|
||||
return TemporalUnits.HOUR;
|
||||
case MINUTES:
|
||||
return TemporalUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return TemporalUnits.SECOND;
|
||||
case MILLIS:
|
||||
return TemporalUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Supported temporal units.
|
||||
*/
|
||||
enum TemporalUnits implements TemporalUnit {
|
||||
YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T extends TimezonedDateAggregationExpression> T applyTimezone(T instance, Timezone timezone) {
|
||||
return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone()
|
||||
|
||||
@@ -0,0 +1,222 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
/**
|
||||
* Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.}
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DocumentOperators {
|
||||
|
||||
/**
|
||||
* Obtain the document position (including gaps) relative to others (rank).
|
||||
*
|
||||
* @return new instance of {@link Rank}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Rank rank() {
|
||||
return new Rank();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the document position (without gaps) relative to others (rank).
|
||||
*
|
||||
* @return new instance of {@link DenseRank}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DenseRank denseRank() {
|
||||
return new DenseRank();
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the field referenced by given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DocumentOperatorsFactory}.
|
||||
*/
|
||||
public static DocumentOperatorsFactory valueOf(String fieldReference) {
|
||||
return new DocumentOperatorsFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DocumentOperatorsFactory}.
|
||||
*/
|
||||
public static DocumentOperatorsFactory valueOf(AggregationExpression expression) {
|
||||
return new DocumentOperatorsFactory(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the current document position.
|
||||
*
|
||||
* @return new instance of {@link DocumentNumber}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DocumentNumber documentNumber() {
|
||||
return new DocumentNumber();
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentOperatorsFactory {
|
||||
|
||||
private final Object target;
|
||||
|
||||
public DocumentOperatorsFactory(Object target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that applies the expression to a document at specified position
|
||||
* relative to the current document.
|
||||
*
|
||||
* @param by the value to add to the current position.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift shift(int by) {
|
||||
|
||||
Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString());
|
||||
return shift.by(by);
|
||||
}
|
||||
|
||||
private boolean usesExpression() {
|
||||
return target instanceof AggregationExpression;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents
|
||||
* occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class Rank implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$rank", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple
|
||||
* documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next
|
||||
* rank without any gaps.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DenseRank implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$denseRank", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentNumber} resolves the current document position.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DocumentNumber implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$documentNumber", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Shift applies an expression to a document in a specified position relative to the current document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class Shift extends AbstractAggregationExpression {
|
||||
|
||||
private Shift(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the field to evaluate and return.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public static Shift shift(String fieldReference) {
|
||||
return new Shift(Collections.singletonMap("output", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the {@link AggregationExpression expression} to evaluate and return.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public static Shift shift(AggregationExpression expression) {
|
||||
return new Shift(Collections.singletonMap("output", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Shift the document position relative to the current. Use a positive value for follow up documents (eg. 1 for the
|
||||
* next) or a negative value for the predecessor documents (eg. -1 for the previous).
|
||||
*
|
||||
* @param shiftBy value to add to the current position.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift by(int shiftBy) {
|
||||
return new Shift(append("by", shiftBy));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the default value if the target document is out of range.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift defaultTo(Object value) {
|
||||
return new Shift(append("default", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link AggregationExpression expression} to evaluate if the target document is out of range.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift defaultToValueOf(AggregationExpression expression) {
|
||||
return defaultTo(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$shift";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Gateway to {@literal evaluation operators} such as {@literal $expr}.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public class EvaluationOperators {
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given fieldReference.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link EvaluationOperatorFactory}.
|
||||
*/
|
||||
public static EvaluationOperatorFactory valueOf(String fieldReference) {
|
||||
return new EvaluationOperatorFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link EvaluationOperatorFactory}.
|
||||
*/
|
||||
public static EvaluationOperatorFactory valueOf(AggregationExpression expression) {
|
||||
return new EvaluationOperatorFactory(expression);
|
||||
}
|
||||
|
||||
public static class EvaluationOperatorFactory {
|
||||
|
||||
private final String fieldReference;
|
||||
private final AggregationExpression expression;
|
||||
|
||||
/**
|
||||
* Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
*/
|
||||
public EvaluationOperatorFactory(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
this.fieldReference = fieldReference;
|
||||
this.expression = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
*/
|
||||
public EvaluationOperatorFactory(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that is a valid aggregation expression.
|
||||
*
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public Expr expr() {
|
||||
return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows the use of aggregation expressions within the query language.
|
||||
*/
|
||||
public static class Expr extends AbstractAggregationExpression {
|
||||
|
||||
private Expr(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$expr";
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Expr}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public static Expr valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new Expr(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Expr}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public static Expr valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Expr(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@code $expr} as {@link CriteriaDefinition}.
|
||||
*
|
||||
* @return the {@link CriteriaDefinition} from this expression.
|
||||
*/
|
||||
public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) {
|
||||
|
||||
Document criteriaObject = toDocument(context);
|
||||
|
||||
return new CriteriaDefinition() {
|
||||
@Override
|
||||
public Document getCriteriaObject() {
|
||||
return criteriaObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKey() {
|
||||
return getMongoMethod();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -16,6 +16,7 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -29,6 +30,7 @@ import org.springframework.util.Assert;
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Divya Srivastava
|
||||
* @since 1.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/match/">MongoDB Aggregation Framework:
|
||||
* $match</a>
|
||||
@@ -36,6 +38,7 @@ import org.springframework.util.Assert;
|
||||
public class MatchOperation implements AggregationOperation {
|
||||
|
||||
private final CriteriaDefinition criteriaDefinition;
|
||||
private final AggregationExpression expression;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}.
|
||||
@@ -45,7 +48,23 @@ public class MatchOperation implements AggregationOperation {
|
||||
public MatchOperation(CriteriaDefinition criteriaDefinition) {
|
||||
|
||||
Assert.notNull(criteriaDefinition, "Criteria must not be null!");
|
||||
|
||||
this.criteriaDefinition = criteriaDefinition;
|
||||
this.expression = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} for the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public MatchOperation(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
this.criteriaDefinition = null;
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -54,7 +73,9 @@ public class MatchOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject()));
|
||||
|
||||
return new Document(getOperator(),
|
||||
context.getMappedObject(expression != null ? expression.toDocument() : criteriaDefinition.getCriteriaObject()));
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -21,6 +21,7 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.expression.spel.ast.Projection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -193,5 +193,6 @@ public class SetOperation extends DocumentEnhancingOperation {
|
||||
*/
|
||||
SetOperation withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,873 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code setWindowFields}-operation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/">https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/</a>
|
||||
*/
|
||||
public class SetWindowFieldsOperation
|
||||
implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation {
|
||||
|
||||
private static final String CURRENT = "current";
|
||||
private static final String UNBOUNDED = "unbounded";
|
||||
|
||||
private final @Nullable Object partitionBy;
|
||||
private final @Nullable AggregationOperation sortBy;
|
||||
private final WindowOutput output;
|
||||
|
||||
/**
|
||||
* Create a new {@link SetWindowFieldsOperation} with given args.
|
||||
*
|
||||
* @param partitionBy The field or {@link AggregationExpression} to group by.
|
||||
* @param sortBy the {@link SortOperation operation} to sort the documents by in the partition.
|
||||
* @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective
|
||||
* values.
|
||||
*/
|
||||
protected SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy,
|
||||
WindowOutput output) {
|
||||
|
||||
this.partitionBy = partitionBy;
|
||||
this.sortBy = sortBy;
|
||||
this.output = output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}.
|
||||
*
|
||||
* @return new instance of {@link SetWindowFieldsOperationBuilder}.
|
||||
*/
|
||||
public static SetWindowFieldsOperationBuilder builder() {
|
||||
return new SetWindowFieldsOperationBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return ExposedFields.nonSynthetic(Fields.from(output.fields.toArray(new Field[0])));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document $setWindowFields = new Document();
|
||||
if (partitionBy != null) {
|
||||
if (partitionBy instanceof AggregationExpression) {
|
||||
$setWindowFields.append("partitionBy", ((AggregationExpression) partitionBy).toDocument(context));
|
||||
} else if (partitionBy instanceof Field) {
|
||||
$setWindowFields.append("partitionBy", context.getReference((Field) partitionBy).toString());
|
||||
} else {
|
||||
$setWindowFields.append("partitionBy", partitionBy);
|
||||
}
|
||||
}
|
||||
|
||||
if (sortBy != null) {
|
||||
$setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator()));
|
||||
}
|
||||
|
||||
Document output = new Document();
|
||||
for (ComputedField field : this.output.fields) {
|
||||
|
||||
Document fieldOperation = field.getWindowOperator().toDocument(context);
|
||||
if (field.window != null) {
|
||||
fieldOperation.put("window", field.window.toDocument(context));
|
||||
}
|
||||
output.append(field.getName(), fieldOperation);
|
||||
}
|
||||
$setWindowFields.append("output", output);
|
||||
|
||||
return new Document(getOperator(), $setWindowFields);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator()
|
||||
*/
|
||||
@Override
|
||||
public String getOperator() {
|
||||
return "$setWindowFields";
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField
|
||||
* field(s)} to append to the documents in the output.
|
||||
*/
|
||||
public static class WindowOutput {
|
||||
|
||||
private final List<ComputedField> fields;
|
||||
|
||||
/**
|
||||
* Create a new output containing the single given {@link ComputedField field}.
|
||||
*
|
||||
* @param outputField must not be {@literal null}.
|
||||
*/
|
||||
public WindowOutput(ComputedField outputField) {
|
||||
|
||||
Assert.notNull(outputField, "OutputField must not be null!");
|
||||
|
||||
this.fields = new ArrayList<>();
|
||||
this.fields.add(outputField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the given {@link ComputedField field} to the outptut.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WindowOutput append(ComputedField field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
fields.add(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ComputedFieldAppender}.
|
||||
* @see #append(ComputedField)
|
||||
*/
|
||||
public ComputedFieldAppender append(AggregationExpression expression) {
|
||||
|
||||
return new ComputedFieldAppender() {
|
||||
|
||||
@Nullable private Window window;
|
||||
|
||||
@Override
|
||||
public WindowOutput as(String fieldname) {
|
||||
|
||||
return WindowOutput.this.append(new ComputedField(fieldname, expression, window));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ComputedFieldAppender within(Window window) {
|
||||
this.window = window;
|
||||
return this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}.
|
||||
*/
|
||||
interface ComputedFieldAppender {
|
||||
|
||||
/**
|
||||
* Specify the target field name.
|
||||
*
|
||||
* @param fieldname the name of field to add to the target document.
|
||||
* @return the {@link WindowOutput} that started the append operation.
|
||||
*/
|
||||
WindowOutput as(String fieldname);
|
||||
|
||||
/**
|
||||
* Specify the window boundaries.
|
||||
*
|
||||
* @param window must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
ComputedFieldAppender within(Window window);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Field} that the result of a computation done via an {@link AggregationExpression}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class ComputedField implements Field {
|
||||
|
||||
private final String name;
|
||||
private final AggregationExpression windowOperator;
|
||||
private final @Nullable Window window;
|
||||
|
||||
/**
|
||||
* Create a new {@link ComputedField}.
|
||||
*
|
||||
* @param name the target field name.
|
||||
* @param windowOperator the expression to calculate the field value.
|
||||
*/
|
||||
public ComputedField(String name, AggregationExpression windowOperator) {
|
||||
this(name, windowOperator, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ComputedField}.
|
||||
*
|
||||
* @param name the target field name.
|
||||
* @param windowOperator the expression to calculate the field value.
|
||||
* @param window the boundaries to operate within. Can be {@literal null}.
|
||||
*/
|
||||
public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) {
|
||||
|
||||
this.name = name;
|
||||
this.windowOperator = windowOperator;
|
||||
this.window = window;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTarget() {
|
||||
return getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public AggregationExpression getWindowOperator() {
|
||||
return windowOperator;
|
||||
}
|
||||
|
||||
public Window getWindow() {
|
||||
return window;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick access to {@link DocumentWindow documents} and {@literal RangeWindow range} {@link Window windows}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface Windows {
|
||||
|
||||
/**
|
||||
* Create a document window relative to the position of the current document.
|
||||
*
|
||||
* @param lower an integer for a position relative to the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @param upper an integer for a position relative to the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @return new instance of {@link DocumentWindow}.
|
||||
*/
|
||||
static DocumentWindow documents(Object lower, Object upper) {
|
||||
return new DocumentWindow(lower, upper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a range window defined based on sort expression.
|
||||
*
|
||||
* @param lower a numeric value to add the sort by field value of the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @param upper a numeric value to add the sort by field value of the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @return new instance of {@link RangeWindow}.
|
||||
*/
|
||||
static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) {
|
||||
return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a range window based on the {@link Sort sort value} of the current document via a fluent API.
|
||||
*
|
||||
* @return new instance of {@link RangeWindowBuilder}.
|
||||
*/
|
||||
static RangeWindowBuilder range() {
|
||||
return new RangeWindowBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a document window relative to the position of the current document via a fluent API.
|
||||
*
|
||||
* @return new instance of {@link DocumentWindowBuilder}.
|
||||
*/
|
||||
static DocumentWindowBuilder documents() {
|
||||
return new DocumentWindowBuilder();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}.
|
||||
*/
|
||||
public interface Window {
|
||||
|
||||
/**
|
||||
* The lower (inclusive) boundary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Object getLower();
|
||||
|
||||
/**
|
||||
* The upper (inclusive) boundary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Object getUpper();
|
||||
|
||||
/**
|
||||
* Obtain the document representation of the window in a default {@link AggregationOperationContext context}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
default Document toDocument() {
|
||||
return toDocument(Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the document representation of the window in the given {@link AggregationOperationContext context}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document toDocument(AggregationOperationContext ctx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API for a {@link RangeWindow}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RangeWindowBuilder {
|
||||
|
||||
private @Nullable Object lower;
|
||||
private @Nullable Object upper;
|
||||
private @Nullable WindowUnit unit;
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on the sortBy field.
|
||||
*
|
||||
* @param lower eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder from(String lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit based on the sortBy field.
|
||||
*
|
||||
* @param upper eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder to(String upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for
|
||||
* a position before the current document. Use a positive integer for a position after the current document.
|
||||
* {@code 0} is the current document position.
|
||||
*
|
||||
* @param lower
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder from(Number lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for
|
||||
* a position before the current document. Use a positive integer for a position after the current document.
|
||||
* {@code 0} is the current document position.
|
||||
*
|
||||
* @param upper
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder to(Number upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal current} as {@link #from(String) lower} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder fromCurrent() {
|
||||
return from(CURRENT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal unbounded} as {@link #from(String) lower} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder fromUnbounded() {
|
||||
return from(UNBOUNDED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal current} as {@link #to(String) upper} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder toCurrent() {
|
||||
return to(CURRENT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal unbounded} as {@link #to(String) upper} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder toUnbounded() {
|
||||
return to(UNBOUNDED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WindowUnit unit} or measure for the given {@link Window}.
|
||||
*
|
||||
* @param windowUnit must not be {@literal null}. Can be on of {@link Windows}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder unit(WindowUnit windowUnit) {
|
||||
|
||||
Assert.notNull(windowUnit, "WindowUnit must not be null");
|
||||
this.unit = windowUnit;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the {@link RangeWindow}.
|
||||
*
|
||||
* @return new instance of {@link RangeWindow}.
|
||||
*/
|
||||
public RangeWindow build() {
|
||||
|
||||
Assert.notNull(lower, "Lower bound must not be null");
|
||||
Assert.notNull(upper, "Upper bound must not be null");
|
||||
Assert.notNull(unit, "WindowUnit bound must not be null");
|
||||
|
||||
return new RangeWindow(lower, upper, unit);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API for a {@link RangeWindow}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentWindowBuilder {
|
||||
|
||||
private @Nullable Object lower;
|
||||
private @Nullable Object upper;
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on current document. Use a negative integer for a position before the
|
||||
* current document. Use a positive integer for a position after the current document. {@code 0} is the current
|
||||
* document position.
|
||||
*
|
||||
* @param lower
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder from(Number lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder fromCurrent() {
|
||||
return from(CURRENT);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder fromUnbounded() {
|
||||
return from(UNBOUNDED);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder to(String upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on current document.
|
||||
*
|
||||
* @param lower eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder from(String lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit based on current document. Use a negative integer for a position before the
|
||||
* current document. Use a positive integer for a position after the current document. {@code 0} is the current
|
||||
* document position.
|
||||
*
|
||||
* @param upper
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder to(Number upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder toCurrent() {
|
||||
return to(CURRENT);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder toUnbounded() {
|
||||
return to(UNBOUNDED);
|
||||
}
|
||||
|
||||
public DocumentWindow build() {
|
||||
|
||||
Assert.notNull(lower, "Lower bound must not be null");
|
||||
Assert.notNull(upper, "Upper bound must not be null");
|
||||
|
||||
return new DocumentWindow(lower, upper);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Common base class for {@link Window} implementation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static abstract class WindowImpl implements Window {
|
||||
|
||||
private final Object lower;
|
||||
private final Object upper;
|
||||
|
||||
protected WindowImpl(Object lower, Object upper) {
|
||||
this.lower = lower;
|
||||
this.upper = upper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getLower() {
|
||||
return lower;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getUpper() {
|
||||
return upper;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Window} implementation based on the current document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentWindow extends WindowImpl {
|
||||
|
||||
DocumentWindow(Object lower, Object upper) {
|
||||
super(lower, upper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext ctx) {
|
||||
return new Document("documents", Arrays.asList(getLower(), getUpper()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Window} implementation based on the sort fields.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RangeWindow extends WindowImpl {
|
||||
|
||||
private final WindowUnit unit;
|
||||
|
||||
protected RangeWindow(Object lower, Object upper, WindowUnit unit) {
|
||||
|
||||
super(lower, upper);
|
||||
this.unit = unit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext ctx) {
|
||||
|
||||
Document range = new Document("range", new Object[] { getLower(), getUpper() });
|
||||
if (unit != null && !WindowUnits.DEFAULT.equals(unit)) {
|
||||
range.append("unit", unit.name().toLowerCase());
|
||||
}
|
||||
return range;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The actual time unit to apply to a {@link Window}.
|
||||
*/
|
||||
public interface WindowUnit {
|
||||
|
||||
String name();
|
||||
|
||||
/**
|
||||
* Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and
|
||||
* milliseconds.
|
||||
*
|
||||
* @param timeUnit the time unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static WindowUnit from(TimeUnit timeUnit) {
|
||||
|
||||
Assert.notNull(timeUnit, "TimeUnit must not be null");
|
||||
|
||||
switch (timeUnit) {
|
||||
case DAYS:
|
||||
return WindowUnits.DAY;
|
||||
case HOURS:
|
||||
return WindowUnits.HOUR;
|
||||
case MINUTES:
|
||||
return WindowUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return WindowUnits.SECOND;
|
||||
case MILLISECONDS:
|
||||
return WindowUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given chrono unit into a {@link WindowUnit}. Supported units are: years, weeks, months, days, hours,
|
||||
* minutes, seconds, and millis.
|
||||
*
|
||||
* @param chronoUnit the chrono unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static WindowUnit from(ChronoUnit chronoUnit) {
|
||||
|
||||
switch (chronoUnit) {
|
||||
case YEARS:
|
||||
return WindowUnits.YEAR;
|
||||
case WEEKS:
|
||||
return WindowUnits.WEEK;
|
||||
case MONTHS:
|
||||
return WindowUnits.MONTH;
|
||||
case DAYS:
|
||||
return WindowUnits.DAY;
|
||||
case HOURS:
|
||||
return WindowUnits.HOUR;
|
||||
case MINUTES:
|
||||
return WindowUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return WindowUnits.SECOND;
|
||||
case MILLIS:
|
||||
return WindowUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick access to available {@link WindowUnit units}.
|
||||
*/
|
||||
public enum WindowUnits implements WindowUnit {
|
||||
DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND
|
||||
}
|
||||
|
||||
/**
|
||||
* A fluent builder to create a {@link SetWindowFieldsOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class SetWindowFieldsOperationBuilder {
|
||||
|
||||
private Object partitionBy;
|
||||
private SortOperation sortOperation;
|
||||
private WindowOutput output;
|
||||
|
||||
/**
|
||||
* Specify the field to group by.
|
||||
*
|
||||
* @param fieldName must not be {@literal null} or null.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionByField(String fieldName) {
|
||||
|
||||
Assert.hasText(fieldName, "Field name must not be empty or null");
|
||||
return partitionBy(Fields.field("$" + fieldName, fieldName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression expression} to group by.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) {
|
||||
return partitionBy(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort {@link Sort.Direction#ASC ascending} by the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(String... fields) {
|
||||
return sortBy(Sort.by(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the sort order.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(Sort sort) {
|
||||
return sortBy(new SortOperation(sort));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SortOperation} to use.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) {
|
||||
|
||||
Assert.notNull(sort, "SortOperation must not be null");
|
||||
|
||||
this.sortOperation = sort;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the actual output computation.
|
||||
*
|
||||
* @param output must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder output(WindowOutput output) {
|
||||
|
||||
Assert.notNull(output, "WindowOutput must not be null");
|
||||
|
||||
this.output = output;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a field capturing the result of the given {@link AggregationExpression expression} to the output.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link WindowChoice}.
|
||||
*/
|
||||
public WindowChoice output(AggregationExpression expression) {
|
||||
|
||||
return new WindowChoice() {
|
||||
|
||||
@Nullable private Window window;
|
||||
|
||||
@Override
|
||||
public As within(Window window) {
|
||||
|
||||
Assert.notNull(window, "Window must not be null");
|
||||
|
||||
this.window = window;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SetWindowFieldsOperationBuilder as(String targetFieldName) {
|
||||
|
||||
Assert.hasText(targetFieldName, "Target field name must not be empty or null");
|
||||
|
||||
ComputedField computedField = new ComputedField(targetFieldName, expression, window);
|
||||
|
||||
if (SetWindowFieldsOperationBuilder.this.output == null) {
|
||||
SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField);
|
||||
} else {
|
||||
SetWindowFieldsOperationBuilder.this.output.append(computedField);
|
||||
}
|
||||
|
||||
return SetWindowFieldsOperationBuilder.this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface to capture field name used to capture the computation result.
|
||||
*/
|
||||
public interface As {
|
||||
|
||||
/**
|
||||
* Define the target name field name to hold the computation result.
|
||||
*
|
||||
* @param targetFieldName must not be {@literal null} or empty.
|
||||
* @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance.
|
||||
*/
|
||||
SetWindowFieldsOperationBuilder as(String targetFieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface to capture an optional {@link Window} applicable to the field computation.
|
||||
*/
|
||||
public interface WindowChoice extends As {
|
||||
|
||||
/**
|
||||
* Specify calculation boundaries.
|
||||
*
|
||||
* @param window must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
As within(Window window);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Partition by a value that translates to a valid mongodb expression.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionBy(Object value) {
|
||||
|
||||
Assert.notNull(value, "Partition By must not be null");
|
||||
|
||||
partitionBy = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments.
|
||||
*
|
||||
* @return new instance of {@link SetWindowFieldsOperation}.
|
||||
*/
|
||||
public SetWindowFieldsOperation build() {
|
||||
return new SetWindowFieldsOperation(partitionBy, sortOperation, output);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -102,7 +102,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG);
|
||||
ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state);
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, null, null, context));
|
||||
return transform(new AggregationExpressionTransformationContext<>(node, null, null, context));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -500,7 +500,10 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
dbo.put(methodReference.getArgumentMap()[i++], transform(child, context));
|
||||
}
|
||||
args = dbo;
|
||||
} else {
|
||||
} else if (ObjectUtils.nullSafeEquals(methodReference.getArgumentType(), ArgumentType.EMPTY_DOCUMENT)) {
|
||||
args = new Document();
|
||||
}
|
||||
else {
|
||||
|
||||
List<Object> argList = new ArrayList<Object>();
|
||||
|
||||
|
||||
@@ -18,8 +18,11 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.mongodb.util.RegexFlags;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -27,6 +30,7 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Divya Srivastava
|
||||
* @since 1.10
|
||||
*/
|
||||
public class StringOperators {
|
||||
@@ -516,6 +520,233 @@ public class StringOperators {
|
||||
return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find the document with the first match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(String regex) {
|
||||
return createRegexFind().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find the document with the first
|
||||
* match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(AggregationExpression expression) {
|
||||
return createRegexFind().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find the document with the first match.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(Pattern pattern) {
|
||||
return createRegexFind().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find the document with the first match.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(String regex, String options) {
|
||||
return createRegexFind().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexFind createRegexFind() {
|
||||
return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find all the documents with the match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(String regex) {
|
||||
return createRegexFindAll().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find all the documents with the
|
||||
* match..<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(AggregationExpression expression) {
|
||||
return createRegexFindAll().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find all the documents with the match.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(Pattern pattern) {
|
||||
return createRegexFindAll().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find all the documents with the match.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(String regex, String options) {
|
||||
return createRegexFindAll().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexFindAll createRegexFindAll() {
|
||||
return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find if a match is found or not.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(String regex) {
|
||||
return createRegexMatch().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find if a match is found or not.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(AggregationExpression expression) {
|
||||
return createRegexMatch().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find if a match is found or not.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(Pattern pattern) {
|
||||
return createRegexMatch().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find if a match is found or not.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(String regex, String options) {
|
||||
return createRegexMatch().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexMatch createRegexMatch() {
|
||||
return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first
|
||||
* occurrence of the search string with the given replacement.
|
||||
*
|
||||
* @param search
|
||||
* @param replacement
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public ReplaceOne replaceOne(String search, String replacement) {
|
||||
return createReplaceOne().find(search).replacement(replacement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and replaces the first
|
||||
* occurrence of the search string computed by the given {@link AggregationExpression} with the given replacement.
|
||||
*
|
||||
* @param search
|
||||
* @param replacement
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public ReplaceOne replaceOne(AggregationExpression search, String replacement) {
|
||||
return createReplaceOne().findValueOf(search).replacement(replacement);
|
||||
}
|
||||
|
||||
private ReplaceOne createReplaceOne() {
|
||||
return usesFieldRef() ? ReplaceOne.valueOf(fieldReference) : ReplaceOne.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and replaces all
|
||||
* occurrences of the search string with the given replacement.
|
||||
*
|
||||
* @param search
|
||||
* @param replacement
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public ReplaceAll replaceAll(String search, String replacement) {
|
||||
return createReplaceAll().find(search).replacement(replacement);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and replaces all
|
||||
* occurrences of the search string computed by the given {@link AggregationExpression} with the given replacement.
|
||||
*
|
||||
* @param search
|
||||
* @param replacement
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public ReplaceAll replaceAll(AggregationExpression search, String replacement) {
|
||||
return createReplaceAll().findValueOf(search).replacement(replacement);
|
||||
}
|
||||
|
||||
private ReplaceAll createReplaceAll() {
|
||||
return usesFieldRef() ? ReplaceAll.valueOf(fieldReference) : ReplaceAll.valueOf(expression);
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
@@ -1477,4 +1708,720 @@ public class StringOperators {
|
||||
return "$rtrim";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and
|
||||
* returns information on the first matched substring. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexFind extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexFind(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public static RegexFind valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public static RegexFind valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexFind(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexFind(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexFind(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexFind";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and
|
||||
* returns information on all the matched substrings. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexFindAll extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexFindAll(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public static RegexFindAll valueOf(String fieldReference) {
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as
|
||||
* {@literal input} value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public static RegexFindAll valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexFindAll(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexFindAll";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and
|
||||
* returns a boolean that indicates if a match is found or not. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexMatch extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexMatch(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public static RegexMatch valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public static RegexMatch valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexMatch(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexMatch";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $replaceOne} which replaces the first instance of a search string in an
|
||||
* input string with a replacement string. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public static class ReplaceOne extends AbstractAggregationExpression {
|
||||
|
||||
protected ReplaceOne(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceOne} using the given as {@literal input}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public static ReplaceOne value(String value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
return new ReplaceOne(Collections.singletonMap("input", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceOne} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public static ReplaceOne valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new ReplaceOne(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceOne} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public static ReplaceOne valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceOne(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* The string to use to replace the first matched instance of {@code find} in input.
|
||||
*
|
||||
* @param replacement must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne replacement(String replacement) {
|
||||
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
|
||||
return new ReplaceOne(append("replacement", replacement));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the reference to the {@link Field field} holding the string to use to replace the first matched
|
||||
* instance of {@code find} in input.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne replacementOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new ReplaceOne(append("replacement", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the {@link AggregationExpression} evaluating to the string to use to replace the first matched instance
|
||||
* of {@code find} in {@code input}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne replacementOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceOne(append("replacement", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* The string to search for within the given input field.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne find(String value) {
|
||||
|
||||
Assert.notNull(value, "Search string must not be null!");
|
||||
|
||||
return new ReplaceOne(append("find", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the string to search for within the given input field.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne findValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new ReplaceOne(append("find", fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the the string to search for within the given input
|
||||
* field.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public ReplaceOne findValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceOne(append("find", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$replaceOne";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $replaceAll} which replaces all instances of a search string in an input
|
||||
* string with a replacement string. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.4 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public static class ReplaceAll extends AbstractAggregationExpression {
|
||||
|
||||
protected ReplaceAll(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceAll} using the given as {@literal input}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceOne}.
|
||||
*/
|
||||
public static ReplaceAll value(String value) {
|
||||
|
||||
Assert.notNull(value, "Value must not be null!");
|
||||
|
||||
return new ReplaceAll(Collections.singletonMap("input", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceAll} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public static ReplaceAll valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new ReplaceAll(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ReplaceAll} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public static ReplaceAll valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceAll(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* The string to use to replace the first matched instance of {@code find} in input.
|
||||
*
|
||||
* @param replacement must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll replacement(String replacement) {
|
||||
|
||||
Assert.notNull(replacement, "Replacement must not be null!");
|
||||
|
||||
return new ReplaceAll(append("replacement", replacement));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the reference to the {@link Field field} holding the string to use to replace the first matched
|
||||
* instance of {@code find} in input.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll replacementValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new ReplaceAll(append("replacement", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the {@link AggregationExpression} evaluating to the string to use to replace the first matched instance
|
||||
* of {@code find} in input.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll replacementValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceAll(append("replacement", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* The string to search for within the given input field.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll find(String value) {
|
||||
|
||||
Assert.notNull(value, "Search string must not be null!");
|
||||
|
||||
return new ReplaceAll(append("find", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the string to search for within the given input field.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll findValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new ReplaceAll(append("find", fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the string to search for within the given input field.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ReplaceAll}.
|
||||
*/
|
||||
public ReplaceAll findValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new ReplaceAll(append("find", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$replaceAll";
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.DBRef;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public interface DbRefResolver {
|
||||
public interface DbRefResolver extends ReferenceResolver {
|
||||
|
||||
/**
|
||||
* Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method
|
||||
|
||||
@@ -15,13 +15,6 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
@@ -29,28 +22,18 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
@@ -67,13 +50,11 @@ import com.mongodb.client.model.Filters;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(DefaultDbRefResolver.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}.
|
||||
@@ -82,11 +63,11 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator());
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
this.objenesis = new ObjenesisStd(true);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -114,17 +95,8 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
*/
|
||||
@Override
|
||||
public Document fetch(DBRef dbRef) {
|
||||
|
||||
MongoCollection<Document> mongoCollection = getCollection(dbRef);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName()
|
||||
: mongoCollection.getNamespace().getDatabaseName(),
|
||||
dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
return mongoCollection.find(Filters.eq("_id", dbRef.getId())).first();
|
||||
return getReferenceLoader().fetchOne(DocumentReferenceQuery.forSingleDocument(Filters.eq("_id", dbRef.getId())),
|
||||
ReferenceCollection.fromDBRef(dbRef));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -158,14 +130,14 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
MongoCollection<Document> mongoCollection = getCollection(databaseSource);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Bulk fetching DBRefs {} from {}.{}.", ids,
|
||||
LOGGER.trace(String.format("Bulk fetching DBRefs %s from %s.%s.", ids,
|
||||
StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName()
|
||||
: mongoCollection.getNamespace().getDatabaseName(),
|
||||
databaseSource.getCollectionName());
|
||||
databaseSource.getCollectionName()));
|
||||
}
|
||||
|
||||
List<Document> result = mongoCollection //
|
||||
.find(new Document("_id", new Document("$in", ids))) //
|
||||
.find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) //
|
||||
.into(new ArrayList<>());
|
||||
|
||||
return ids.stream() //
|
||||
@@ -185,44 +157,9 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
DbRefResolverCallback callback, DbRefProxyHandler handler) {
|
||||
|
||||
Class<?> propertyType = property.getType();
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback);
|
||||
Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref);
|
||||
|
||||
if (!propertyType.isInterface()) {
|
||||
|
||||
Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
|
||||
return handler.populateId(property, dbref, factory);
|
||||
}
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
proxyFactory.addInterface(LazyLoadingProxy.class);
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
|
||||
return handler.populateId(property, dbref, proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CGLib enhanced type for the given source type.
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getEnhancedTypeFor(Class<?> type) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
return enhancer.createClass();
|
||||
return handler.populateId(property, dbref, lazyLoadingProxy);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -245,253 +182,10 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
private static Stream<Document> documentWithId(Object identifier, Collection<Document> documents) {
|
||||
|
||||
return documents.stream() //
|
||||
.filter(it -> it.get("_id").equals(identifier)) //
|
||||
.filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) //
|
||||
.limit(1);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a
|
||||
* {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is
|
||||
* guaranteed to be performed only once.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class LazyLoadingInterceptor
|
||||
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private volatile boolean resolved;
|
||||
private final @Nullable DBRef dbref;
|
||||
private @Nullable Object result;
|
||||
|
||||
static {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
|
||||
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param dbref can be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
*/
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
this.dbref = dbref;
|
||||
this.callback = callback;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Override
|
||||
public Object invoke(@Nullable MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public Object intercept(Object obj, Method method, Object[] args, @Nullable MethodProxy proxy) throws Throwable {
|
||||
|
||||
if (INITIALIZE_METHOD.equals(method)) {
|
||||
return ensureResolved();
|
||||
}
|
||||
|
||||
if (TO_DBREF_METHOD.equals(method)) {
|
||||
return this.dbref;
|
||||
}
|
||||
|
||||
if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {
|
||||
|
||||
if (ReflectionUtils.isToStringMethod(method)) {
|
||||
return proxyToString(proxy);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isEqualsMethod(method)) {
|
||||
return proxyEquals(proxy, args[0]);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode(proxy);
|
||||
}
|
||||
|
||||
// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
|
||||
if (FINALIZE_METHOD.equals(method)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
if (target == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
|
||||
return method.invoke(target, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a to string representation for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private String proxyToString(@Nullable Object proxy) {
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (dbref != null) {
|
||||
description.append(dbref.getCollectionName());
|
||||
description.append(":");
|
||||
description.append(dbref.getId());
|
||||
} else {
|
||||
description.append(System.identityHashCode(proxy));
|
||||
}
|
||||
description.append("$").append(LazyLoadingProxy.class.getSimpleName());
|
||||
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hashcode for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private int proxyHashCode(@Nullable Object proxy) {
|
||||
return proxyToString(proxy).hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an equality check for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @param that
|
||||
* @return
|
||||
*/
|
||||
private boolean proxyEquals(@Nullable Object proxy, Object that) {
|
||||
|
||||
if (!(that instanceof LazyLoadingProxy)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (that == proxy) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return proxyToString(proxy).equals(that.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Will trigger the resolution if the proxy is not resolved already or return a previously resolved result.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for serialization.
|
||||
*
|
||||
* @param out
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for deserialization.
|
||||
*
|
||||
* @param in
|
||||
* @throws IOException
|
||||
*/
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true;
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the proxy into its backing object.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (resolved) {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Accessing already resolved lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Resolving lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
|
||||
if (translatedException instanceof ClientSessionException) {
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
|
||||
}
|
||||
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
|
||||
translatedException != null ? translatedException : ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook for obtaining the {@link MongoCollection} for a given {@link DBRef}.
|
||||
*
|
||||
@@ -504,4 +198,10 @@ public class DefaultDbRefResolver implements DbRefResolver {
|
||||
return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory)
|
||||
.getCollection(dbref.getCollectionName(), Document.class);
|
||||
}
|
||||
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity
|
||||
* associations expressed via a {@link MongoPersistentProperty persitent property}. Creates {@link LazyLoadingProxy
|
||||
* proxies} for associations that should be lazily loaded.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Anton Buzdalkin
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DefaultReferenceResolver implements ReferenceResolver {
|
||||
|
||||
private final ReferenceLoader referenceLoader;
|
||||
private final LazyLoadingProxyFactory proxyFactory;
|
||||
|
||||
private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx);
|
||||
private final LookupFunction singleValueLookupFunction = (filter, ctx) -> {
|
||||
Document target = getReferenceLoader().fetchOne(filter, ctx);
|
||||
return target == null ? Collections.emptyList() : Collections.singleton(target);
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DefaultReferenceResolver}.
|
||||
*
|
||||
* @param referenceLoader must not be {@literal null}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(referenceLoader, "ReferenceLoader must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null!");
|
||||
|
||||
this.referenceLoader = referenceLoader;
|
||||
this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
|
||||
LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction
|
||||
: singleValueLookupFunction;
|
||||
|
||||
if (isLazyReference(property)) {
|
||||
return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily.
|
||||
*
|
||||
* @param property
|
||||
* @return return {@literal true} if the defined association is lazy.
|
||||
* @see DBRef#lazy()
|
||||
* @see DocumentReference#lazy()
|
||||
*/
|
||||
protected boolean isLazyReference(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return property.getDocumentReference().lazy();
|
||||
}
|
||||
|
||||
return property.getDBRef() != null && property.getDBRef().lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} executing the lookup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
protected ReferenceLoader getReferenceLoader() {
|
||||
return referenceLoader;
|
||||
}
|
||||
|
||||
LazyLoadingProxyFactory getProxyFactory() {
|
||||
return proxyFactory;
|
||||
}
|
||||
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) {
|
||||
return proxyFactory.createLazyLoadingProxy(property, it -> {
|
||||
return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader);
|
||||
}, source instanceof DocumentReferenceSource ? ((DocumentReferenceSource)source).getTargetSource() : source);
|
||||
}
|
||||
}
|
||||
@@ -91,7 +91,7 @@ class DocumentAccessor {
|
||||
public void put(MongoPersistentProperty prop, @Nullable Object value) {
|
||||
|
||||
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
|
||||
String fieldName = prop.getFieldName();
|
||||
String fieldName = getFieldName(prop);
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
BsonUtils.addToMap(document, fieldName, value);
|
||||
@@ -123,7 +123,7 @@ class DocumentAccessor {
|
||||
*/
|
||||
@Nullable
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
return BsonUtils.resolveValue(document, property.getFieldName());
|
||||
return BsonUtils.resolveValue(document, getFieldName(property));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -150,7 +150,11 @@ class DocumentAccessor {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
|
||||
return BsonUtils.hasValue(document, property.getFieldName());
|
||||
return BsonUtils.hasValue(document, getFieldName(property));
|
||||
}
|
||||
|
||||
String getFieldName(MongoPersistentProperty prop) {
|
||||
return prop.getFieldName();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -0,0 +1,259 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.WeakHashMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
/**
|
||||
* Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy},
|
||||
* registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter},
|
||||
* simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
class DocumentPointerFactory {
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Map<String, LinkageDocument> cache;
|
||||
|
||||
/**
|
||||
* A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of
|
||||
* <code>{'_id' : ?#{#target} }</code>.
|
||||
*/
|
||||
private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt)
|
||||
"['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id"
|
||||
"?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces
|
||||
"['\"]?\\?#\\{#target\\}['\"]?" + // leading to the potentially quoted ?#{#target} expression
|
||||
"\\s*}"); // some optional whitespaces and document close
|
||||
|
||||
DocumentPointerFactory(ConversionService conversionService,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.conversionService = conversionService;
|
||||
this.mappingContext = mappingContext;
|
||||
this.cache = new WeakHashMap<>();
|
||||
}
|
||||
|
||||
DocumentPointer<?> computePointer(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
MongoPersistentProperty property, Object value, Class<?> typeHint) {
|
||||
|
||||
if (value instanceof LazyLoadingProxy) {
|
||||
return () -> ((LazyLoadingProxy) value).getSource();
|
||||
}
|
||||
|
||||
if (conversionService.canConvert(typeHint, DocumentPointer.class)) {
|
||||
return conversionService.convert(value, DocumentPointer.class);
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getRequiredPersistentEntity(property.getAssociationTargetType());
|
||||
|
||||
if (usesDefaultLookup(property)) {
|
||||
|
||||
MongoPersistentProperty idProperty = persistentEntity.getIdProperty();
|
||||
Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier();
|
||||
|
||||
if (idProperty.hasExplicitWriteTarget()
|
||||
&& conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) {
|
||||
return () -> conversionService.convert(idValue, idProperty.getFieldType());
|
||||
}
|
||||
|
||||
if (idValue instanceof String && ObjectId.isValid((String) idValue)) {
|
||||
return () -> new ObjectId((String) idValue);
|
||||
}
|
||||
|
||||
return () -> idValue;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> valueEntity = mappingContext.getPersistentEntity(value.getClass());
|
||||
PersistentPropertyAccessor<Object> propertyAccessor;
|
||||
if (valueEntity == null) {
|
||||
propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value);
|
||||
} else {
|
||||
propertyAccessor = valueEntity.getPropertyPathAccessor(value);
|
||||
}
|
||||
|
||||
return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from)
|
||||
.getDocumentPointer(mappingContext, persistentEntity, propertyAccessor);
|
||||
}
|
||||
|
||||
private boolean usesDefaultLookup(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches();
|
||||
}
|
||||
|
||||
Reference atReference = property.findAnnotation(Reference.class);
|
||||
if (atReference != null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
throw new IllegalStateException(String.format("%s does not seem to be define Reference", property));
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and
|
||||
* inverting it.
|
||||
*
|
||||
* <pre class="code">
|
||||
* // source
|
||||
* { 'firstname' : ?#{fn}, 'lastname' : '?#{ln} }
|
||||
*
|
||||
* // target
|
||||
* { 'fn' : ..., 'ln' : ... }
|
||||
* </pre>
|
||||
*
|
||||
* The actual pointer is the computed via
|
||||
* {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from
|
||||
* the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions
|
||||
* from the source.
|
||||
*/
|
||||
static class LinkageDocument {

	// Matches lookup expressions of the form "?#{fieldName}" or "?#{#target.fieldName}".
	// The optional '#' before the named group tolerates SpEL-style variable references.
	static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w\\d\\.\\-)]*)\\}");

	// Matches the internal placeholder tokens ("###_0_###", "###_1_###", ...) that temporarily
	// replace the expressions so the lookup string stays parseable as a JSON document.
	static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?<index>\\d*)_###");

	// Raw lookup string as supplied by the mapping annotation.
	private final String lookup;
	// Parsed lookup with every expression replaced by a quoted placeholder token.
	private final org.bson.Document documentPointer;
	// Maps placeholder token -> referenced field name (with any "target." prefix stripped).
	private final Map<String, String> placeholderMap;
	// true when the lookup consists of nothing but a single "#target" reference, in which case
	// the resolved pointer collapses to the bare value instead of a wrapping document.
	private final boolean isSimpleTargetPointer;

	/**
	 * Factory method creating a {@link LinkageDocument} for the given raw lookup string.
	 *
	 * @param lookup the raw lookup containing {@code ?#{...}} expressions. Must not be {@literal null}.
	 * @return a new {@link LinkageDocument}.
	 */
	static LinkageDocument from(String lookup) {
		return new LinkageDocument(lookup);
	}

	/**
	 * Parse the lookup: every {@code ?#{...}} expression is swapped for an indexed placeholder so the
	 * remainder can be parsed via {@link org.bson.Document#parse(String)}, while the placeholder-to-field
	 * mapping is retained for later substitution in {@link #updatePlaceholders}.
	 */
	private LinkageDocument(String lookup) {

		this.lookup = lookup;
		this.placeholderMap = new LinkedHashMap<>();

		int index = 0;
		Matcher matcher = EXPRESSION_PATTERN.matcher(lookup);
		String targetLookup = lookup;

		while (matcher.find()) {

			String expression = matcher.group();
			// "target." is a prefix used inside lookup expressions; only the plain field name is kept.
			String fieldName = matcher.group("fieldName").replace("target.", "");

			String placeholder = placeholder(index);
			placeholderMap.put(placeholder, fieldName);
			// Quote the placeholder so the resulting string remains valid JSON.
			targetLookup = targetLookup.replace(expression, "'" + placeholder + "'");
			index++;
		}

		this.documentPointer = org.bson.Document.parse(targetLookup);
		this.isSimpleTargetPointer = placeholderMap.size() == 1 && placeholderMap.containsValue("target")
				&& lookup.contains("#target");
	}

	// Renders the placeholder token for the given expression index.
	private String placeholder(int index) {
		return "###_" + index + "_###";
	}

	// Whether the given key is one of the internal placeholder tokens.
	// NOTE(review): not referenced within this class in the visible chunk — presumably used by a caller.
	private boolean isPlaceholder(String key) {
		return PLACEHOLDER_PATTERN.matcher(key).matches();
	}

	/**
	 * Obtain a lazily-evaluated {@link DocumentPointer} that, when resolved, substitutes the
	 * placeholders with actual property values read from the given {@link PersistentPropertyAccessor}.
	 *
	 * @param mappingContext used to resolve nested entities and property paths.
	 * @param persistentEntity the entity owning the referenced properties.
	 * @param propertyAccessor accessor for reading the property values.
	 * @return never {@literal null}.
	 */
	DocumentPointer<Object> getDocumentPointer(
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
			MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
		return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity,
				propertyAccessor);
	}

	/**
	 * Recursively walk the placeholder-bearing {@code source} document and write the resolved values
	 * into {@code target}.
	 *
	 * @param source the parsed lookup (or a nested sub-document of it).
	 * @param target the document resolved values are written to.
	 * @return the populated {@code target} document, or the single raw value when the lookup is a
	 *         simple {@code #target} pointer.
	 * @throws InvalidDataAccessApiUsageException when the lookup contains a query operator ({@code $...})
	 *           from which a pointer cannot be derived.
	 */
	Object updatePlaceholders(org.bson.Document source, org.bson.Document target,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
			MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {

		for (Entry<String, Object> entry : source.entrySet()) {

			// Query operators cannot be translated into a stored pointer value.
			if (entry.getKey().startsWith("$")) {
				throw new InvalidDataAccessApiUsageException(String.format(
						"Cannot derive document pointer from lookup '%s' using query operator (%s). Please consider registering a custom converter.",
						lookup, entry.getKey()));
			}

			if (entry.getValue() instanceof Document) {

				MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey());
				if (persistentProperty != null && persistentProperty.isEntity()) {

					// Descend into the nested entity with an accessor scoped to the nested value.
					MongoPersistentEntity<?> nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType());
					target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
							nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty))));
				} else {
					// Not an entity property: recurse with the current entity/accessor unchanged.
					target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
							persistentEntity, propertyAccessor));
				}
				continue;
			}

			if (placeholderMap.containsKey(entry.getValue())) {

				String attribute = placeholderMap.get(entry.getValue());
				// For dotted expressions only the last path segment becomes the stored attribute name.
				if (attribute.contains(".")) {
					attribute = attribute.substring(attribute.lastIndexOf('.') + 1);
				}

				// "_id" is addressed as the "id" property on the entity model.
				String fieldName = entry.getKey().equals("_id") ? "id" : entry.getKey();
				if (!fieldName.contains(".")) {

					// NOTE(review): getPersistentProperty may return null for an unknown field name —
					// presumably lookups are validated upstream; verify against callers.
					Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName));
					target.put(attribute, targetValue);
					continue;
				}

				// Dotted field name: resolve via a full persistent property path.
				PersistentPropertyPath<?> path = mappingContext
						.getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation()));
				Object targetValue = propertyAccessor.getProperty(path);
				target.put(attribute, targetValue);
				continue;
			}

			// Plain literal entry — copied through unchanged.
			target.put(entry.getKey(), entry.getValue());
		}

		// A sole "#target" expression collapses to the bare value rather than a one-entry document.
		if (target.size() == 1 && isSimpleTargetPointer) {
			return target.values().iterator().next();
		}

		return target;
	}
}
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* The source object to resolve document references upon. Encapsulates the actual source and the reference specific
|
||||
* values.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DocumentReferenceSource {
|
||||
|
||||
private final Object self;
|
||||
|
||||
private final @Nullable Object targetSource;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DocumentReferenceSource}.
|
||||
*
|
||||
* @param self the entire wrapper object holding references. Must not be {@literal null}.
|
||||
* @param targetSource the reference value source.
|
||||
*/
|
||||
DocumentReferenceSource(Object self, @Nullable Object targetSource) {
|
||||
|
||||
this.self = self;
|
||||
this.targetSource = targetSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the outer document.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Object getSelf() {
|
||||
return self;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual (property specific) reference value.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Object getTargetSource() {
|
||||
return targetSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise.
|
||||
*
|
||||
* @param source
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
static Object getTargetSource(Object source) {
|
||||
return source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() : source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise.
|
||||
*
|
||||
* @param self
|
||||
* @return
|
||||
*/
|
||||
static Object getSelf(Object self) {
|
||||
return self instanceof DocumentReferenceSource ? ((DocumentReferenceSource) self).getSelf() : self;
|
||||
}
|
||||
}
|
||||
@@ -15,18 +15,18 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Allows direct interaction with the underlying {@link LazyLoadingInterceptor}.
|
||||
* Allows direct interaction with the underlying {@code LazyLoadingInterceptor}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.5
|
||||
* @see LazyLoadingProxyFactory
|
||||
*/
|
||||
public interface LazyLoadingProxy {
|
||||
|
||||
@@ -46,4 +46,15 @@ public interface LazyLoadingProxy {
|
||||
*/
|
||||
@Nullable
|
||||
DBRef toDBRef();
|
||||
|
||||
/**
|
||||
* Returns the raw {@literal source} object that defines the reference.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
default Object getSource() {
|
||||
return toDBRef();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,303 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class LazyLoadingProxyFactory {

	private static final Log LOGGER = LogFactory.getLog(LazyLoadingProxyFactory.class);

	// Instantiates CGLib-enhanced classes without invoking a constructor.
	private final ObjenesisStd objenesis;

	// Translates store-specific runtime exceptions raised during lazy resolution.
	private final PersistenceExceptionTranslator exceptionTranslator;

	public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) {
		this.exceptionTranslator = exceptionTranslator;
		this.objenesis = new ObjenesisStd(true);
	}

	/**
	 * Create a lazy loading proxy for the given property. Class types are proxied via a CGLib subclass
	 * (instantiated through Objenesis, bypassing constructors), interface types via a JDK/Spring AOP proxy.
	 * Either proxy routes all calls through a {@link LazyLoadingInterceptor}.
	 *
	 * @param property the property whose value shall be resolved lazily.
	 * @param callback invoked on first access to resolve the actual value.
	 * @param source the raw source value (e.g. a {@link DBRef}) backing the reference.
	 * @return the proxy instance.
	 */
	public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback,
			Object source) {

		Class<?> propertyType = property.getType();
		LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator);

		if (!propertyType.isInterface()) {

			// Concrete class: subclass proxy created without calling any constructor.
			Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
			factory.setCallbacks(new Callback[] { interceptor });

			return factory;
		}

		// Interface: build an AOP proxy implementing the property type, its super-interfaces and LazyLoadingProxy.
		ProxyFactory proxyFactory = new ProxyFactory();

		for (Class<?> type : propertyType.getInterfaces()) {
			proxyFactory.addInterface(type);
		}

		proxyFactory.addInterface(LazyLoadingProxy.class);
		proxyFactory.addInterface(propertyType);
		proxyFactory.addAdvice(interceptor);

		return proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader());
	}

	/**
	 * Returns the CGLib enhanced type for the given source type.
	 *
	 * @param type the class to subclass.
	 * @return the generated proxy class (also implementing {@link LazyLoadingProxy}).
	 */
	private Class<?> getEnhancedTypeFor(Class<?> type) {

		Enhancer enhancer = new Enhancer();
		enhancer.setSuperclass(type);
		enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
		enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });

		return enhancer.createClass();
	}

	/**
	 * Interceptor backing both proxy flavors (AOP alliance and CGLib). Resolves the reference value on
	 * first real access, answers {@link LazyLoadingProxy} methods directly, and supports Java
	 * serialization by materializing the resolved value.
	 */
	public static class LazyLoadingInterceptor
			implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {

		// LazyLoadingProxy/Object methods the interceptor answers without resolving (except getTarget).
		private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD;

		static {
			try {
				INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
				TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
				FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
				GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource");
			} catch (Exception e) {
				throw new RuntimeException(e);
			}
		}

		private final MongoPersistentProperty property;
		private final DbRefResolverCallback callback;
		private final Object source;
		private final PersistenceExceptionTranslator exceptionTranslator;
		// volatile for cross-thread visibility of the resolved flag; resolve() itself is synchronized.
		private volatile boolean resolved;
		private @Nullable Object result;

		public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source,
				PersistenceExceptionTranslator exceptionTranslator) {

			this.property = property;
			this.callback = callback;
			this.source = source;
			this.exceptionTranslator = exceptionTranslator;
		}

		// AOP alliance entry point — delegates to the CGLib-style intercept method.
		@Nullable
		@Override
		public Object invoke(MethodInvocation invocation) throws Throwable {
			return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
		}

		/**
		 * Central dispatch: answers LazyLoadingProxy accessors and Object methods without touching the
		 * store where possible, otherwise resolves the target and forwards the invocation to it.
		 */
		@Nullable
		@Override
		public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable {

			if (INITIALIZE_METHOD.equals(method)) {
				return ensureResolved();
			}

			if (TO_DBREF_METHOD.equals(method)) {
				return source instanceof DBRef ? source : null;
			}

			if (GET_SOURCE_METHOD.equals(method)) {
				return source;
			}

			if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {

				if (ReflectionUtils.isToStringMethod(method)) {
					return proxyToString(source);
				}

				if (ReflectionUtils.isEqualsMethod(method)) {
					return proxyEquals(o, args[0]);
				}

				if (ReflectionUtils.isHashCodeMethod(method)) {
					return proxyHashCode();
				}

				// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
				if (FINALIZE_METHOD.equals(method)) {
					return null;
				}
			}

			// Any other method requires the actual value.
			Object target = ensureResolved();

			if (target == null) {
				return null;
			}

			ReflectionUtils.makeAccessible(method);

			return method.invoke(target, args);
		}

		// Resolves the value once and caches it; subsequent calls return the cached result.
		@Nullable
		private Object ensureResolved() {

			if (!resolved) {
				this.result = resolve();
				this.resolved = true;
			}

			return this.result;
		}

		// Builds a stable textual representation without resolving the reference.
		private String proxyToString(@Nullable Object source) {

			StringBuilder description = new StringBuilder();
			if (source != null) {
				if (source instanceof DBRef) {
					description.append(((DBRef) source).getCollectionName());
					description.append(":");
					description.append(((DBRef) source).getId());
				} else {
					description.append(source);
				}
			} else {
				description.append(System.identityHashCode(source));
			}
			description.append("$").append(LazyLoadingProxy.class.getSimpleName());

			return description.toString();
		}

		// Two lazy proxies are considered equal when their textual representations match.
		private boolean proxyEquals(@Nullable Object proxy, Object that) {

			if (!(that instanceof LazyLoadingProxy)) {
				return false;
			}

			if (that == proxy) {
				return true;
			}

			return proxyToString(proxy).equals(that.toString());
		}

		// Hash code derived from the source description, consistent with proxyEquals/proxyToString.
		private int proxyHashCode() {
			return proxyToString(source).hashCode();
		}

		/**
		 * Callback method for serialization. Forces resolution so the materialized value — not the
		 * unserializable callback — is written.
		 *
		 * @param out the stream to write to.
		 * @throws IOException on write failure.
		 */
		private void writeObject(ObjectOutputStream out) throws IOException {

			ensureResolved();
			out.writeObject(this.result);
		}

		/**
		 * Callback method for deserialization. Restores the already-materialized value and marks the
		 * interceptor resolved.
		 *
		 * @param in the stream to read from.
		 * @throws IOException on read failure.
		 */
		private void readObject(ObjectInputStream in) throws IOException {

			try {
				this.resolved = true;
				this.result = in.readObject();
			} catch (ClassNotFoundException e) {
				throw new LazyLoadingException("Could not deserialize result", e);
			}
		}

		// Performs the actual (synchronized) resolution via the callback, translating store exceptions.
		@Nullable
		private synchronized Object resolve() {

			// Re-check under the lock: another thread may have resolved while we waited.
			if (resolved) {

				if (LOGGER.isTraceEnabled()) {
					LOGGER.trace(String.format("Accessing already resolved lazy loading property %s.%s",
							property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()));
				}
				return result;
			}

			try {
				if (LOGGER.isTraceEnabled()) {
					LOGGER.trace(String.format("Resolving lazy loading property %s.%s",
							property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()));
				}

				return callback.resolve(property);

			} catch (RuntimeException ex) {

				DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex);

				// Session-related failures get a dedicated message as they indicate invalid session state.
				if (translatedException instanceof ClientSessionException) {
					throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
				}

				throw new LazyLoadingException("Unable to lazily resolve DBRef!",
						translatedException != null ? translatedException : ex);
			}
		}
	}

}
|
||||
@@ -27,27 +27,38 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.bson.codecs.DecoderContext;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.json.JsonReader;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanClassLoaderAware;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.AccessOptions;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.Parameter;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PersistentPropertyPathAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
@@ -61,8 +72,11 @@ import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
@@ -70,7 +84,11 @@ import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.Predicates;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -97,6 +115,7 @@ import com.mongodb.DBRef;
|
||||
* @author Mark Paluch
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Heesu Jung
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
@@ -105,20 +124,24 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
public static final ClassTypeInformation<Bson> BSON = ClassTypeInformation.from(Bson.class);
|
||||
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
protected static final Log LOGGER = LogFactory.getLog(MappingMongoConverter.class);
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
protected final QueryMapper idMapper;
|
||||
protected final DbRefResolver dbRefResolver;
|
||||
protected final DefaultDbRefProxyHandler dbRefProxyHandler;
|
||||
protected final ReferenceLookupDelegate referenceLookupDelegate;
|
||||
|
||||
protected @Nullable ApplicationContext applicationContext;
|
||||
protected MongoTypeMapper typeMapper;
|
||||
protected @Nullable String mapKeyDotReplacement = null;
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private MongoTypeMapper defaultTypeMapper;
|
||||
private SpELContext spELContext;
|
||||
private @Nullable EntityCallbacks entityCallbacks;
|
||||
private final DocumentPointerFactory documentPointerFactory;
|
||||
private final SpelAwareProxyProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory();
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
@@ -135,8 +158,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
|
||||
this.mappingContext = mappingContext;
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this.defaultTypeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this::getWriteTarget);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
@@ -147,6 +171,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
ConversionContext context = getConversionContext(path);
|
||||
return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator);
|
||||
});
|
||||
|
||||
this.referenceLookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext);
|
||||
this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -159,8 +186,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
Assert.notNull(path, "ObjectPath must not be null");
|
||||
|
||||
return new ConversionContext(conversions, path, this::readDocument, this::readCollectionOrArray, this::readMap,
|
||||
this::readDBRef, this::getPotentiallyConvertedSimpleRead);
|
||||
return new ConversionContext(this, conversions, path, this::readDocument, this::readCollectionOrArray,
|
||||
this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -186,9 +213,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param typeMapper the typeMapper to set. Can be {@literal null}.
|
||||
*/
|
||||
public void setTypeMapper(@Nullable MongoTypeMapper typeMapper) {
|
||||
this.typeMapper = typeMapper == null
|
||||
? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext)
|
||||
: typeMapper;
|
||||
this.typeMapper = typeMapper;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -197,7 +222,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
@Override
|
||||
public MongoTypeMapper getTypeMapper() {
|
||||
return this.typeMapper;
|
||||
return this.typeMapper == null ? this.defaultTypeMapper : this.typeMapper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ProjectionFactory getProjectionFactory() {
|
||||
return projectionFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CustomConversions getCustomConversions() {
|
||||
return conversions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -242,10 +277,17 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.spELContext = new SpELContext(this.spELContext, applicationContext);
|
||||
this.projectionFactory.setBeanFactory(applicationContext);
|
||||
this.projectionFactory.setBeanClassLoader(applicationContext.getClassLoader());
|
||||
|
||||
if (entityCallbacks == null) {
|
||||
setEntityCallbacks(EntityCallbacks.create(applicationContext));
|
||||
}
|
||||
|
||||
ClassLoader classLoader = applicationContext.getClassLoader();
|
||||
if (this.defaultTypeMapper instanceof BeanClassLoaderAware && classLoader != null) {
|
||||
((BeanClassLoaderAware) this.defaultTypeMapper).setBeanClassLoader(classLoader);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -264,11 +306,155 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R> R project(EntityProjection<R, ?> projection, Bson bson) {
|
||||
|
||||
if (!projection.isProjection()) { // backed by real object
|
||||
|
||||
TypeInformation<?> typeToRead = projection.getMappedType().getType().isInterface() ? projection.getDomainType()
|
||||
: projection.getMappedType();
|
||||
return (R) read(typeToRead, bson);
|
||||
}
|
||||
|
||||
ProjectingConversionContext context = new ProjectingConversionContext(this, conversions, ObjectPath.ROOT,
|
||||
this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead,
|
||||
projection);
|
||||
|
||||
return doReadProjection(context, bson, projection);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <R> R doReadProjection(ConversionContext context, Bson bson, EntityProjection<R, ?> projection) {
|
||||
|
||||
MongoPersistentEntity<?> entity = getMappingContext().getRequiredPersistentEntity(projection.getActualDomainType());
|
||||
TypeInformation<?> mappedType = projection.getActualMappedType();
|
||||
MongoPersistentEntity<R> mappedEntity = (MongoPersistentEntity<R>) getMappingContext()
|
||||
.getPersistentEntity(mappedType);
|
||||
SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext);
|
||||
|
||||
boolean isInterfaceProjection = mappedType.getType().isInterface();
|
||||
if (isInterfaceProjection) {
|
||||
|
||||
PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(mappedEntity);
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson);
|
||||
PersistentPropertyAccessor<?> accessor = new MapPersistentPropertyAccessor();
|
||||
|
||||
PersistentPropertyAccessor<?> convertingAccessor = PropertyTranslatingPropertyAccessor
|
||||
.create(new ConvertingPropertyAccessor<>(accessor, conversionService), propertyTranslator);
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor,
|
||||
evaluator);
|
||||
|
||||
readProperties(context, entity, convertingAccessor, documentAccessor, valueProvider, evaluator,
|
||||
Predicates.isTrue());
|
||||
return (R) projectionFactory.createProjection(mappedType.getType(), accessor.getBean());
|
||||
}
|
||||
|
||||
// DTO projection
|
||||
if (mappedEntity == null) {
|
||||
throw new MappingException(String.format("No mapping metadata found for %s", mappedType.getType().getName()));
|
||||
}
|
||||
|
||||
// create target instance, merge metadata from underlying DTO type
|
||||
PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(entity,
|
||||
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName));
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson) {
|
||||
@Override
|
||||
String getFieldName(MongoPersistentProperty prop) {
|
||||
return propertyTranslator.translate(prop).getFieldName();
|
||||
}
|
||||
};
|
||||
|
||||
PreferredConstructor<?, MongoPersistentProperty> persistenceConstructor = mappedEntity.getPersistenceConstructor();
|
||||
ParameterValueProvider<MongoPersistentProperty> provider = persistenceConstructor != null
|
||||
&& persistenceConstructor.hasParameters()
|
||||
? getParameterProvider(context, mappedEntity, documentAccessor, evaluator)
|
||||
: NoOpParameterValueProvider.INSTANCE;
|
||||
|
||||
EntityInstantiator instantiator = instantiators.getInstantiatorFor(mappedEntity);
|
||||
R instance = instantiator.createInstance(mappedEntity, provider);
|
||||
PersistentPropertyAccessor<R> accessor = mappedEntity.getPropertyAccessor(instance);
|
||||
|
||||
populateProperties(context, mappedEntity, documentAccessor, evaluator, instance);
|
||||
|
||||
PersistentPropertyAccessor<?> convertingAccessor = new ConvertingPropertyAccessor<>(accessor, conversionService);
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor, evaluator);
|
||||
|
||||
readProperties(context, mappedEntity, convertingAccessor, documentAccessor, valueProvider, evaluator,
|
||||
Predicates.isTrue());
|
||||
|
||||
return accessor.getBean();
|
||||
}
|
||||
|
||||
private Object doReadOrProject(ConversionContext context, Bson source, TypeInformation<?> typeHint,
|
||||
EntityProjection<?, ?> typeDescriptor) {
|
||||
|
||||
if (typeDescriptor.isProjection()) {
|
||||
return doReadProjection(context, BsonUtils.asDocument(source), typeDescriptor);
|
||||
}
|
||||
|
||||
return readDocument(context, source, typeHint);
|
||||
}
|
||||
|
||||
class ProjectingConversionContext extends ConversionContext {
|
||||
|
||||
private final EntityProjection<?, ?> returnedTypeDescriptor;
|
||||
|
||||
ProjectingConversionContext(MongoConverter sourceConverter, CustomConversions customConversions, ObjectPath path,
|
||||
ContainerValueConverter<Collection<?>> collectionConverter, ContainerValueConverter<Bson> mapConverter,
|
||||
ContainerValueConverter<DBRef> dbRefConverter, ValueConverter<Object> elementConverter,
|
||||
EntityProjection<?, ?> projection) {
|
||||
super(sourceConverter, customConversions, path,
|
||||
(context, source, typeHint) -> doReadOrProject(context, source, typeHint, projection),
|
||||
|
||||
collectionConverter, mapConverter, dbRefConverter, elementConverter);
|
||||
this.returnedTypeDescriptor = projection;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConversionContext forProperty(String name) {
|
||||
|
||||
EntityProjection<?, ?> property = returnedTypeDescriptor.findProperty(name);
|
||||
if (property == null) {
|
||||
return new ConversionContext(sourceConverter, conversions, path, MappingMongoConverter.this::readDocument, collectionConverter,
|
||||
mapConverter, dbRefConverter, elementConverter);
|
||||
}
|
||||
|
||||
return new ProjectingConversionContext(sourceConverter, conversions, path, collectionConverter, mapConverter,
|
||||
dbRefConverter, elementConverter, property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConversionContext withPath(ObjectPath currentPath) {
|
||||
return new ProjectingConversionContext(sourceConverter, conversions, currentPath, collectionConverter,
|
||||
mapConverter, dbRefConverter, elementConverter, returnedTypeDescriptor);
|
||||
}
|
||||
}
|
||||
|
||||
static class MapPersistentPropertyAccessor implements PersistentPropertyAccessor<Map<String, Object>> {
|
||||
|
||||
Map<String, Object> map = new LinkedHashMap<>();
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentProperty<?> persistentProperty, Object o) {
|
||||
map.put(persistentProperty.getName(), o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentProperty<?> persistentProperty) {
|
||||
return map.get(persistentProperty.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> getBean() {
|
||||
return map;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.Document)
|
||||
*/
|
||||
public <S extends Object> S read(Class<S> clazz, final Bson bson) {
|
||||
public <S extends Object> S read(Class<S> clazz, Bson bson) {
|
||||
return read(ClassTypeInformation.from(clazz), bson);
|
||||
}
|
||||
|
||||
@@ -290,7 +476,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
TypeInformation<? extends S> typeHint) {
|
||||
|
||||
Document document = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson;
|
||||
TypeInformation<? extends S> typeToRead = typeMapper.readType(document, typeHint);
|
||||
TypeInformation<? extends S> typeToRead = getTypeMapper().readType(document, typeHint);
|
||||
Class<? extends S> rawType = typeToRead.getType();
|
||||
|
||||
if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) {
|
||||
@@ -352,11 +538,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
parameterProvider);
|
||||
}
|
||||
|
||||
private <S extends Object> S read(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
private <S> S read(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
|
||||
SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext);
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson);
|
||||
|
||||
if (hasIdentifier(bson)) {
|
||||
S existing = findContextualEntity(context, entity, bson);
|
||||
if (existing != null) {
|
||||
return existing;
|
||||
}
|
||||
}
|
||||
|
||||
PreferredConstructor<S, MongoPersistentProperty> persistenceConstructor = entity.getPersistenceConstructor();
|
||||
|
||||
ParameterValueProvider<MongoPersistentProperty> provider = persistenceConstructor != null
|
||||
@@ -367,12 +560,23 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
S instance = instantiator.createInstance(entity, provider);
|
||||
|
||||
if (entity.requiresPropertyPopulation()) {
|
||||
|
||||
return populateProperties(context, entity, documentAccessor, evaluator, instance);
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
private boolean hasIdentifier(Document bson) {
|
||||
return bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME) != null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private <S> S findContextualEntity(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
return context.getPath().getPathItem(bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME), entity.getCollection(),
|
||||
entity.getType());
|
||||
}
|
||||
|
||||
private <S> S populateProperties(ConversionContext context, MongoPersistentEntity<S> entity,
|
||||
DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) {
|
||||
|
||||
@@ -388,7 +592,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor,
|
||||
evaluator);
|
||||
|
||||
readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator);
|
||||
Predicate<MongoPersistentProperty> propertyFilter = isIdentifier(entity).or(isConstructorArgument(entity)).negate();
|
||||
readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator, propertyFilter);
|
||||
|
||||
return accessor.getBean();
|
||||
}
|
||||
@@ -430,50 +635,54 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
private void readProperties(ConversionContext context, MongoPersistentEntity<?> entity,
|
||||
PersistentPropertyAccessor<?> accessor, DocumentAccessor documentAccessor,
|
||||
MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator) {
|
||||
MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator,
|
||||
Predicate<MongoPersistentProperty> propertyFilter) {
|
||||
|
||||
DbRefResolverCallback callback = null;
|
||||
|
||||
for (MongoPersistentProperty prop : entity) {
|
||||
|
||||
if (!propertyFilter.test(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ConversionContext propertyContext = context.forProperty(prop.getName());
|
||||
MongoDbPropertyValueProvider valueProviderToUse = valueProvider.withContext(propertyContext);
|
||||
|
||||
if (prop.isAssociation() && !entity.isConstructorArgument(prop)) {
|
||||
|
||||
if (callback == null) {
|
||||
callback = getDbRefResolverCallback(context, documentAccessor, evaluator);
|
||||
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
|
||||
}
|
||||
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback);
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
|
||||
propertyContext, evaluator);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prop.isUnwrapped()) {
|
||||
|
||||
accessor.setProperty(prop,
|
||||
readUnwrapped(context, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop)));
|
||||
readUnwrapped(propertyContext, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop)));
|
||||
continue;
|
||||
}
|
||||
|
||||
// We skip the id property since it was already set
|
||||
|
||||
if (entity.isIdProperty(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entity.isConstructorArgument(prop) || !documentAccessor.hasValue(prop)) {
|
||||
if (!documentAccessor.hasValue(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prop.isAssociation()) {
|
||||
|
||||
if (callback == null) {
|
||||
callback = getDbRefResolverCallback(context, documentAccessor, evaluator);
|
||||
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
|
||||
}
|
||||
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback);
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
|
||||
propertyContext, evaluator);
|
||||
continue;
|
||||
}
|
||||
|
||||
accessor.setProperty(prop, valueProvider.getPropertyValue(prop));
|
||||
accessor.setProperty(prop, valueProviderToUse.getPropertyValue(prop));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -485,16 +694,43 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
private void readAssociation(Association<MongoPersistentProperty> association, PersistentPropertyAccessor<?> accessor,
|
||||
DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback) {
|
||||
DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback,
|
||||
ConversionContext context, SpELExpressionEvaluator evaluator) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
Object value = documentAccessor.get(property);
|
||||
|
||||
if (property.isDocumentReference()
|
||||
|| (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) {
|
||||
|
||||
// quite unusual but sounds like worth having?
|
||||
|
||||
if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) {
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
DocumentPointer<?> pointer = () -> value;
|
||||
|
||||
// collection like special treatment
|
||||
accessor.setProperty(property, conversionService.convert(pointer, property.getActualType()));
|
||||
} else {
|
||||
|
||||
accessor.setProperty(property,
|
||||
dbRefResolver.resolveReference(property,
|
||||
new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)),
|
||||
referenceLookupDelegate, context::convert));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
|
||||
|
||||
accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
|
||||
}
|
||||
|
||||
@@ -535,6 +771,49 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return createDBRef(object, referringProperty);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
|
||||
if (source instanceof LazyLoadingProxy) {
|
||||
return () -> ((LazyLoadingProxy) source).getSource();
|
||||
}
|
||||
|
||||
Assert.notNull(referringProperty, "Cannot create DocumentReference. The referringProperty must not be null!");
|
||||
|
||||
if (referringProperty.isDbReference()) {
|
||||
return () -> toDBRef(source, referringProperty);
|
||||
}
|
||||
|
||||
if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) {
|
||||
return createDocumentPointer(source, referringProperty);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference");
|
||||
}
|
||||
|
||||
DocumentPointer<?> createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
|
||||
if (referringProperty == null) {
|
||||
return () -> source;
|
||||
}
|
||||
|
||||
if (source instanceof DocumentPointer) {
|
||||
return (DocumentPointer<?>) source;
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignableValue(referringProperty.getType(), source)
|
||||
&& conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) {
|
||||
return conversionService.convert(source, DocumentPointer.class);
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) {
|
||||
return documentPointerFactory.computePointer(mappingContext, referringProperty, source,
|
||||
referringProperty.getActualType());
|
||||
}
|
||||
|
||||
return () -> source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Root entry method into write conversion. Adds a type discriminator to the {@link Document}. Shouldn't be called for
|
||||
* nested conversions.
|
||||
@@ -556,7 +835,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
BsonUtils.removeNullId(bson);
|
||||
|
||||
if (requiresTypeHint(entityType)) {
|
||||
typeMapper.writeType(type, bson);
|
||||
getTypeMapper().writeType(type, bson);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -642,6 +921,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
continue;
|
||||
}
|
||||
if (prop.isAssociation()) {
|
||||
|
||||
writeAssociation(prop.getRequiredAssociation(), accessor, dbObjectAccessor);
|
||||
continue;
|
||||
}
|
||||
@@ -649,10 +929,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Object value = accessor.getProperty(prop);
|
||||
|
||||
if (value == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!conversions.isSimpleType(value.getClass())) {
|
||||
if (prop.writeNullValues()) {
|
||||
dbObjectAccessor.put(prop, null);
|
||||
}
|
||||
} else if (!conversions.isSimpleType(value.getClass())) {
|
||||
writePropertyInternal(value, dbObjectAccessor, prop);
|
||||
} else {
|
||||
writeSimpleInternal(value, bson, prop);
|
||||
@@ -665,7 +945,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
MongoPersistentProperty inverseProp = association.getInverse();
|
||||
|
||||
writePropertyInternal(accessor.getProperty(inverseProp), dbObjectAccessor, inverseProp);
|
||||
Object value = accessor.getProperty(inverseProp);
|
||||
|
||||
if (value == null && !inverseProp.isUnwrapped() && inverseProp.writeNullValues()) {
|
||||
dbObjectAccessor.put(inverseProp, null);
|
||||
return;
|
||||
}
|
||||
|
||||
writePropertyInternal(value, dbObjectAccessor, inverseProp);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked" })
|
||||
@@ -678,6 +965,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
TypeInformation<?> valueType = ClassTypeInformation.from(obj.getClass());
|
||||
TypeInformation<?> type = prop.getTypeInformation();
|
||||
|
||||
if (conversions.getPropertyValueConversions().hasValueConverter(prop)) {
|
||||
accessor.put(prop,
|
||||
conversions.getPropertyValueConversions().getValueConverter(prop).write(obj, new MongoConversionContext(prop, this)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (prop.isUnwrapped()) {
|
||||
|
||||
Document target = new Document();
|
||||
@@ -719,6 +1012,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
if (prop.isAssociation() && prop.isAnnotationPresent(Reference.class)) {
|
||||
|
||||
accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext)
|
||||
.computePointer(mappingContext, prop, obj, valueType.getType()).getPointer());
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* If we have a LazyLoadingProxy we make sure it is initialized first.
|
||||
*/
|
||||
@@ -757,10 +1057,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
|
||||
if (property.isAssociation()) {
|
||||
|
||||
List<Object> targetCollection = collection.stream().map(it -> {
|
||||
return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType())
|
||||
.getPointer();
|
||||
}).collect(Collectors.toList());
|
||||
|
||||
return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class),
|
||||
new ArrayList<>());
|
||||
}
|
||||
|
||||
if (property.hasExplicitWriteTarget()) {
|
||||
return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>());
|
||||
}
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
|
||||
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>());
|
||||
}
|
||||
|
||||
List<Object> dbList = new ArrayList<>(collection.size());
|
||||
@@ -789,7 +1101,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(map, "Given map must not be null!");
|
||||
Assert.notNull(property, "PersistentProperty must not be null!");
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
if (!property.isAssociation()) {
|
||||
return writeMapInternal(map, new Document(), property.getTypeInformation());
|
||||
}
|
||||
|
||||
@@ -803,7 +1115,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = prepareMapKey(key.toString());
|
||||
document.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
if (property.isDbReference()) {
|
||||
document.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
} else {
|
||||
document.put(simpleKey, documentPointerFactory
|
||||
.computePointer(mappingContext, property, value, property.getActualType()).getPointer());
|
||||
}
|
||||
|
||||
} else {
|
||||
throw new MappingException("Cannot use a complex object as a key value.");
|
||||
@@ -840,7 +1157,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element,
|
||||
componentType != null ? componentType.getType() : Object.class));
|
||||
} else if (element instanceof Collection || elementType.isArray()) {
|
||||
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList()));
|
||||
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new ArrayList<>()));
|
||||
} else {
|
||||
Document document = new Document();
|
||||
writeInternal(element, document, componentType);
|
||||
@@ -872,7 +1189,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
writeSimpleInternal(val, bson, simpleKey);
|
||||
} else if (val instanceof Collection || val.getClass().isArray()) {
|
||||
BsonUtils.addToMap(bson, simpleKey,
|
||||
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList()));
|
||||
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>()));
|
||||
} else {
|
||||
Document document = new Document();
|
||||
TypeInformation<?> valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType()
|
||||
@@ -966,7 +1283,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
boolean notTheSameClass = !valueType.equals(reference);
|
||||
if (notTheSameClass) {
|
||||
typeMapper.writeType(valueType, bson);
|
||||
getTypeMapper().writeType(valueType, bson);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -983,6 +1300,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property) {
|
||||
DocumentAccessor accessor = new DocumentAccessor(bson);
|
||||
|
||||
if (conversions.getPropertyValueConversions().hasValueConverter(property)) {
|
||||
accessor.put(property,
|
||||
conversions.getPropertyValueConversions().getValueConverter(property)
|
||||
.write(value, new MongoConversionContext(property, this)));
|
||||
return;
|
||||
}
|
||||
|
||||
accessor.put(property, getPotentiallyConvertedSimpleWrite(value,
|
||||
property.hasExplicitWriteTarget() ? property.getFieldType() : Object.class));
|
||||
}
|
||||
@@ -1174,7 +1499,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(bson, "Document must not be null!");
|
||||
Assert.notNull(targetType, "TypeInformation must not be null!");
|
||||
|
||||
Class<?> mapType = typeMapper.readType(bson, targetType).getType();
|
||||
Class<?> mapType = getTypeMapper().readType(bson, targetType).getType();
|
||||
|
||||
TypeInformation<?> keyType = targetType.getComponentType();
|
||||
TypeInformation<?> valueType = targetType.getMapValueType() == null ? ClassTypeInformation.OBJECT
|
||||
@@ -1193,7 +1518,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
sourceMap.forEach((k, v) -> {
|
||||
|
||||
if (typeMapper.isTypeKey(k)) {
|
||||
if (getTypeMapper().isTypeKey(k)) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1356,7 +1681,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
}
|
||||
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
if (getTypeMapper().isTypeKey(key)) {
|
||||
|
||||
keyToRemove = key;
|
||||
|
||||
@@ -1527,6 +1852,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
target.conversions = conversions;
|
||||
target.spELContext = spELContext;
|
||||
target.setInstantiators(instantiators);
|
||||
target.defaultTypeMapper = defaultTypeMapper;
|
||||
target.typeMapper = typeMapper;
|
||||
target.setCodecRegistryProvider(dbFactory);
|
||||
target.afterPropertiesSet();
|
||||
@@ -1576,6 +1902,14 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return true;
|
||||
}
|
||||
|
||||
static Predicate<MongoPersistentProperty> isIdentifier(PersistentEntity<?, ?> entity) {
|
||||
return entity::isIdProperty;
|
||||
}
|
||||
|
||||
static Predicate<MongoPersistentProperty> isConstructorArgument(PersistentEntity<?, ?> entity) {
|
||||
return entity::isConstructorArgument;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field
|
||||
* of the configured source {@link Document}.
|
||||
@@ -1637,8 +1971,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return null;
|
||||
}
|
||||
|
||||
if (context.conversions.getPropertyValueConversions().hasValueConverter(property)) {
|
||||
return (T) context.conversions.getPropertyValueConversions().getValueConverter(property).read(value,
|
||||
new MongoConversionContext(property, context.sourceConverter));
|
||||
}
|
||||
|
||||
return (T) context.convert(value, property.getTypeInformation());
|
||||
}
|
||||
|
||||
public MongoDbPropertyValueProvider withContext(ConversionContext context) {
|
||||
|
||||
return context == this.context ? this : new MongoDbPropertyValueProvider(context, accessor, evaluator);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1686,6 +2030,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler);
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return (T) dbRefResolver.resolveReference(property, accessor.get(property), referenceLookupDelegate,
|
||||
context::convert);
|
||||
}
|
||||
|
||||
return super.getPropertyValue(property);
|
||||
}
|
||||
}
|
||||
@@ -1842,6 +2191,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
public org.springframework.data.util.TypeInformation<? extends S> specialize(ClassTypeInformation type) {
|
||||
return delegate.specialize(type);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TypeDescriptor toTypeDescriptor() {
|
||||
return delegate.toTypeDescriptor();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1852,19 +2206,22 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
protected static class ConversionContext {
|
||||
|
||||
private final org.springframework.data.convert.CustomConversions conversions;
|
||||
private final ObjectPath path;
|
||||
private final ContainerValueConverter<Bson> documentConverter;
|
||||
private final ContainerValueConverter<Collection<?>> collectionConverter;
|
||||
private final ContainerValueConverter<Bson> mapConverter;
|
||||
private final ContainerValueConverter<DBRef> dbRefConverter;
|
||||
private final ValueConverter<Object> elementConverter;
|
||||
final MongoConverter sourceConverter;
|
||||
final org.springframework.data.convert.CustomConversions conversions;
|
||||
final ObjectPath path;
|
||||
final ContainerValueConverter<Bson> documentConverter;
|
||||
final ContainerValueConverter<Collection<?>> collectionConverter;
|
||||
final ContainerValueConverter<Bson> mapConverter;
|
||||
final ContainerValueConverter<DBRef> dbRefConverter;
|
||||
final ValueConverter<Object> elementConverter;
|
||||
|
||||
ConversionContext(org.springframework.data.convert.CustomConversions customConversions, ObjectPath path,
|
||||
ConversionContext(MongoConverter sourceConverter,
|
||||
org.springframework.data.convert.CustomConversions customConversions, ObjectPath path,
|
||||
ContainerValueConverter<Bson> documentConverter, ContainerValueConverter<Collection<?>> collectionConverter,
|
||||
ContainerValueConverter<Bson> mapConverter, ContainerValueConverter<DBRef> dbRefConverter,
|
||||
ValueConverter<Object> elementConverter) {
|
||||
|
||||
this.sourceConverter = sourceConverter;
|
||||
this.conversions = customConversions;
|
||||
this.path = path;
|
||||
this.documentConverter = documentConverter;
|
||||
@@ -1908,7 +2265,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (typeHint.isMap()) {
|
||||
|
||||
if(ClassUtils.isAssignable(Document.class, typeHint.getType())) {
|
||||
if (ClassUtils.isAssignable(Document.class, typeHint.getType())) {
|
||||
return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint);
|
||||
}
|
||||
|
||||
@@ -1916,7 +2273,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass()));
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Expected map like structure but found %s", source.getClass()));
|
||||
}
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
@@ -1945,14 +2303,18 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
Assert.notNull(currentPath, "ObjectPath must not be null");
|
||||
|
||||
return new ConversionContext(conversions, currentPath, documentConverter, collectionConverter, mapConverter,
|
||||
dbRefConverter, elementConverter);
|
||||
return new ConversionContext(sourceConverter, conversions, currentPath, documentConverter, collectionConverter,
|
||||
mapConverter, dbRefConverter, elementConverter);
|
||||
}
|
||||
|
||||
public ObjectPath getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public ConversionContext forProperty(String name) {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a simple {@code source} value into {@link TypeInformation the target type}.
|
||||
*
|
||||
@@ -1977,4 +2339,58 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class PropertyTranslatingPropertyAccessor<T> implements PersistentPropertyPathAccessor<T> {
|
||||
|
||||
private final PersistentPropertyAccessor<T> delegate;
|
||||
private final PersistentPropertyTranslator propertyTranslator;
|
||||
|
||||
private PropertyTranslatingPropertyAccessor(PersistentPropertyAccessor<T> delegate,
|
||||
PersistentPropertyTranslator propertyTranslator) {
|
||||
this.delegate = delegate;
|
||||
this.propertyTranslator = propertyTranslator;
|
||||
}
|
||||
|
||||
static <T> PersistentPropertyAccessor<T> create(PersistentPropertyAccessor<T> delegate,
|
||||
PersistentPropertyTranslator propertyTranslator) {
|
||||
return new PropertyTranslatingPropertyAccessor<>(delegate, propertyTranslator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentProperty property, @Nullable Object value) {
|
||||
delegate.setProperty(translate(property), value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentProperty<?> property) {
|
||||
return delegate.getProperty(translate(property));
|
||||
}
|
||||
|
||||
@Override
|
||||
public T getBean() {
|
||||
return delegate.getBean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value,
|
||||
AccessOptions.SetOptions options) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path,
|
||||
AccessOptions.GetOptions context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private MongoPersistentProperty translate(PersistentProperty<?> property) {
|
||||
return propertyTranslator.translate((MongoPersistentProperty) property);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright 2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.convert.ValueConversionContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* {@link ValueConversionContext} that allows to delegate read/write to an underlying {@link MongoConverter}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public class MongoConversionContext implements ValueConversionContext<MongoPersistentProperty> {
|
||||
|
||||
private final MongoPersistentProperty persistentProperty;
|
||||
private final MongoConverter mongoConverter;
|
||||
|
||||
public MongoConversionContext(MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) {
|
||||
|
||||
this.persistentProperty = persistentProperty;
|
||||
this.mongoConverter = mongoConverter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
return persistentProperty;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T write(@Nullable Object value, TypeInformation<T> target) {
|
||||
return (T) mongoConverter.convertToMongoType(value, target);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T read(@Nullable Object value, TypeInformation<T> target) {
|
||||
return value instanceof Bson ? mongoConverter.read(target.getType(), (Bson) value)
|
||||
: ValueConversionContext.super.read(value, target);
|
||||
}
|
||||
}
|
||||
@@ -19,13 +19,17 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityConverter;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -54,6 +58,35 @@ public interface MongoConverter
|
||||
*/
|
||||
MongoTypeMapper getTypeMapper();
|
||||
|
||||
/**
|
||||
* Returns the {@link ProjectionFactory} for this converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
ProjectionFactory getProjectionFactory();
|
||||
|
||||
/**
|
||||
* Returns the {@link CustomConversions} for this converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
CustomConversions getCustomConversions();
|
||||
|
||||
/**
|
||||
* Apply a projection to {@link Bson} and return the projection return type {@code R}.
|
||||
* {@link EntityProjection#isProjection() Non-projecting} descriptors fall back to {@link #read(Class, Object) regular
|
||||
* object materialization}.
|
||||
*
|
||||
* @param descriptor the projection descriptor, must not be {@literal null}.
|
||||
* @param bson must not be {@literal null}.
|
||||
* @param <R>
|
||||
* @return a new instance of the projection return type {@code R}.
|
||||
* @since 3.4
|
||||
*/
|
||||
<R> R project(EntityProjection<R, ?> descriptor, Bson bson);
|
||||
|
||||
/**
|
||||
* Mapping function capable of converting values into a desired target type by eg. extracting the actual java type
|
||||
* from a given {@link BsonValue}.
|
||||
@@ -154,4 +187,5 @@ public interface MongoConverter
|
||||
return convertToMongoType(id,(TypeInformation<?>) null);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -36,9 +36,16 @@ import org.springframework.core.convert.TypeDescriptor;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.core.convert.converter.ConverterFactory;
|
||||
import org.springframework.core.convert.converter.GenericConverter;
|
||||
import org.springframework.data.convert.ConverterBuilder;
|
||||
import org.springframework.data.convert.JodaTimeConverters;
|
||||
import org.springframework.data.convert.PropertyValueConversions;
|
||||
import org.springframework.data.convert.PropertyValueConverter;
|
||||
import org.springframework.data.convert.PropertyValueConverterFactory;
|
||||
import org.springframework.data.convert.PropertyValueConverterRegistrar;
|
||||
import org.springframework.data.convert.SimplePropertyValueConversions;
|
||||
import org.springframework.data.convert.WritingConverter;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -159,6 +166,9 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
private boolean useNativeDriverJavaTimeCodecs = false;
|
||||
private final List<Object> customConverters = new ArrayList<>();
|
||||
|
||||
private final PropertyValueConversions internalValueConversion = PropertyValueConversions.simple(it -> {});
|
||||
private PropertyValueConversions propertyValueConversions = internalValueConversion;
|
||||
|
||||
/**
|
||||
* Create a {@link MongoConverterConfigurationAdapter} using the provided {@code converters} and our own codecs for
|
||||
* JSR-310 types.
|
||||
@@ -178,7 +188,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for
|
||||
* Set whether to or not to use the native MongoDB Java Driver {@link org.bson.codecs.Codec codes} for
|
||||
* {@link org.bson.codecs.jsr310.LocalDateCodec LocalDate}, {@link org.bson.codecs.jsr310.LocalTimeCodec LocalTime}
|
||||
* and {@link org.bson.codecs.jsr310.LocalDateTimeCodec LocalDateTime} using a {@link ZoneOffset#UTC}.
|
||||
*
|
||||
@@ -230,6 +240,27 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gateway to register property specific converters.
|
||||
*
|
||||
* @param configurationAdapter must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.4
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter configurePropertyConversions(
|
||||
Consumer<PropertyValueConverterRegistrar<MongoPersistentProperty>> configurationAdapter) {
|
||||
|
||||
Assert.state(valueConversions() instanceof SimplePropertyValueConversions,
|
||||
"Configured PropertyValueConversions does not allow setting custom ConverterRegistry.");
|
||||
|
||||
PropertyValueConverterRegistrar propertyValueConverterRegistrar = new PropertyValueConverterRegistrar();
|
||||
configurationAdapter.accept(propertyValueConverterRegistrar);
|
||||
|
||||
((SimplePropertyValueConversions) valueConversions())
|
||||
.setValueConverterRegistry(propertyValueConverterRegistrar.buildRegistry());
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom {@link ConverterFactory} implementation.
|
||||
*
|
||||
@@ -244,7 +275,8 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}
|
||||
|
||||
/**
|
||||
* Add {@link Converter converters}, {@link ConverterFactory factories}, ...
|
||||
* Add {@link Converter converters}, {@link ConverterFactory factories}, {@link ConverterBuilder.ConverterAware
|
||||
* converter-aware objects}, and {@link GenericConverter generic converters}.
|
||||
*
|
||||
* @param converters must not be {@literal null} nor contain {@literal null} values.
|
||||
* @return this.
|
||||
@@ -258,10 +290,59 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a custom/default {@link PropertyValueConverterFactory} implementation used to serve
|
||||
* {@link PropertyValueConverter}.
|
||||
*
|
||||
* @param converterFactory must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.4
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter registerPropertyValueConverterFactory(
|
||||
PropertyValueConverterFactory converterFactory) {
|
||||
|
||||
Assert.state(valueConversions() instanceof SimplePropertyValueConversions,
|
||||
"Configured PropertyValueConversions does not allow setting custom ConverterRegistry.");
|
||||
|
||||
((SimplePropertyValueConversions) valueConversions()).setConverterFactory(converterFactory);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally set the {@link PropertyValueConversions} to be applied during mapping.
|
||||
* <p>
|
||||
* Use this method if {@link #configurePropertyConversions(Consumer)} and
|
||||
* {@link #registerPropertyValueConverterFactory(PropertyValueConverterFactory)} are not sufficient.
|
||||
*
|
||||
* @param valueConversions must not be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.4
|
||||
*/
|
||||
public MongoConverterConfigurationAdapter setPropertyValueConversions(PropertyValueConversions valueConversions) {
|
||||
|
||||
Assert.notNull(valueConversions, "PropertyValueConversions must not be null");
|
||||
this.propertyValueConversions = valueConversions;
|
||||
return this;
|
||||
}
|
||||
|
||||
PropertyValueConversions valueConversions() {
|
||||
|
||||
if (this.propertyValueConversions == null) {
|
||||
this.propertyValueConversions = internalValueConversion;
|
||||
}
|
||||
|
||||
return this.propertyValueConversions;
|
||||
}
|
||||
|
||||
ConverterConfiguration createConverterConfiguration() {
|
||||
|
||||
if (hasDefaultPropertyValueConversions() && propertyValueConversions instanceof SimplePropertyValueConversions) {
|
||||
((SimplePropertyValueConversions) propertyValueConversions).init();
|
||||
}
|
||||
|
||||
if (!useNativeDriverJavaTimeCodecs) {
|
||||
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters);
|
||||
return new ConverterConfiguration(STORE_CONVERSIONS, this.customConverters, convertiblePair -> true,
|
||||
this.propertyValueConversions);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -286,7 +367,7 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
}, this.propertyValueConversions);
|
||||
}
|
||||
|
||||
private enum DateToUtcLocalDateTimeConverter implements Converter<Date, LocalDateTime> {
|
||||
@@ -315,5 +396,9 @@ public class MongoCustomConversions extends org.springframework.data.convert.Cus
|
||||
return DateToUtcLocalDateTimeConverter.INSTANCE.convert(source).toLocalDate();
|
||||
}
|
||||
}
|
||||
|
||||
private boolean hasDefaultPropertyValueConversions() {
|
||||
return propertyValueConversions == internalValueConversion;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents}
|
||||
* for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(MongoDatabaseFactoryReferenceLoader.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
MongoCollection<Document> collection = getCollection(context);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace(String.format("Bulk fetching %s from %s.%s.", referenceQuery,
|
||||
StringUtils.hasText(context.getDatabase()) ? context.getDatabase()
|
||||
: collection.getNamespace().getDatabaseName(),
|
||||
context.getCollection()));
|
||||
}
|
||||
|
||||
return referenceQuery.apply(collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying
|
||||
* {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}.
|
||||
*/
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.convert.PropertyValueConverter;
|
||||
|
||||
/**
|
||||
* MongoDB-specific {@link PropertyValueConverter} extension.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.4
|
||||
*/
|
||||
public interface MongoValueConverter<S, T> extends PropertyValueConverter<S, T, MongoConversionContext> {}
|
||||
@@ -17,6 +17,8 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.convert.EntityWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
@@ -61,6 +63,7 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity<?> entity) {
|
||||
return convertToMongoType(obj, entity.getTypeInformation());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link DBRef} to refer to the given object.
|
||||
*
|
||||
@@ -70,4 +73,17 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty);
|
||||
|
||||
/**
|
||||
* Creates a the {@link DocumentPointer} representing the link to another entity.
|
||||
*
|
||||
* @param source the object to create a document link to.
|
||||
* @param referringProperty the client-side property referring to the object which might carry additional metadata for
|
||||
* the {@link DBRef} object to create. Can be {@literal null}.
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
default DocumentPointer<?> toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
return () -> toDBRef(source, referringProperty);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
@@ -69,4 +70,11 @@ public enum NoOpDbRefResolver implements DbRefResolver {
|
||||
private <T> T handle() throws UnsupportedOperationException {
|
||||
throw new UnsupportedOperationException("DBRef resolution is not supported!");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,14 +21,16 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
@@ -73,7 +75,7 @@ import com.mongodb.DBRef;
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(QueryMapper.class);
|
||||
protected static final Log LOGGER = LogFactory.getLog(QueryMapper.class);
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final Document META_TEXT_SCORE = new Document("$meta", "textScore");
|
||||
@@ -221,8 +223,8 @@ public class QueryMapper {
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
|
||||
}
|
||||
|
||||
Document target = new Document();
|
||||
|
||||
BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> {
|
||||
@@ -238,6 +240,18 @@ public class QueryMapper {
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds missing {@code $meta} representation if required.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public Document addMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
return mapMetaAttributes(source, entity, MetaMapping.FORCE);
|
||||
}
|
||||
|
||||
private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity,
|
||||
MetaMapping metaMapping) {
|
||||
|
||||
@@ -420,6 +434,11 @@ public class QueryMapper {
|
||||
|
||||
Object value = applyFieldTargetTypeHintToValue(documentField, sourceValue);
|
||||
|
||||
if(documentField.getProperty() != null && converter.getCustomConversions().getPropertyValueConversions().hasValueConverter(documentField.getProperty())) {
|
||||
return converter.getCustomConversions().getPropertyValueConversions().getValueConverter(documentField.getProperty())
|
||||
.write(value, new MongoConversionContext(documentField.getProperty(), converter));
|
||||
}
|
||||
|
||||
if (documentField.isIdField() && !documentField.isAssociation()) {
|
||||
|
||||
if (isDBObject(value)) {
|
||||
@@ -516,6 +535,10 @@ public class QueryMapper {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty()
|
||||
&& (type.equals(DBRef.class) || entity.getRequiredIdProperty().getActualType().isAssignableFrom(type));
|
||||
@@ -629,7 +652,7 @@ public class QueryMapper {
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
result.add(createDbRefFor(element, property));
|
||||
result.add(createReferenceFor(element, property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -638,12 +661,12 @@ public class QueryMapper {
|
||||
Document result = new Document();
|
||||
Document dbObject = (Document) source;
|
||||
for (String key : dbObject.keySet()) {
|
||||
result.put(key, createDbRefFor(dbObject.get(key), property));
|
||||
result.put(key, createReferenceFor(dbObject.get(key), property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return createDbRefFor(source, property);
|
||||
return createReferenceFor(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -690,12 +713,17 @@ public class QueryMapper {
|
||||
return new AbstractMap.SimpleEntry<>(key, value);
|
||||
}
|
||||
|
||||
private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
|
||||
private Object createReferenceFor(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
return (DBRef) source;
|
||||
}
|
||||
|
||||
if (property != null && (property.isDocumentReference()
|
||||
|| (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) {
|
||||
return converter.toDocumentPointer(source, property).getPointer();
|
||||
}
|
||||
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
@@ -1214,9 +1242,9 @@ public class QueryMapper {
|
||||
|
||||
String types = StringUtils.collectionToDelimitedString(
|
||||
path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> ");
|
||||
QueryMapper.LOGGER.info(
|
||||
"Could not map '{}'. Maybe a fragment in '{}' is considered a simple type. Mapper continues with {}.",
|
||||
path, types, pathExpression);
|
||||
QueryMapper.LOGGER.info(String.format(
|
||||
"Could not map '%s'. Maybe a fragment in '%s' is considered a simple type. Mapper continues with %s.",
|
||||
path, types, pathExpression));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a
|
||||
* {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ReferenceLoader {
|
||||
|
||||
/**
|
||||
* Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
@Nullable
|
||||
default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
Iterator<Document> it = fetchMany(referenceQuery, context).iterator();
|
||||
return it.hasNext() ? it.next() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain multiple {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context);
|
||||
|
||||
/**
|
||||
* The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched
|
||||
* applying potentially given order criteria.
|
||||
*/
|
||||
interface DocumentReferenceQuery {
|
||||
|
||||
/**
|
||||
* Get the query to obtain matching {@link Document documents}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Bson getQuery();
|
||||
|
||||
/**
|
||||
* Get the sort criteria for ordering results.
|
||||
*
|
||||
* @return an empty {@link Document} by default. Never {@literal null}.
|
||||
*/
|
||||
default Bson getSort() {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
default Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return restoreOrder(collection.find(getQuery()).sort(getSort()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore the order of fetched documents.
|
||||
*
|
||||
* @param documents must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
default Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forSingleDocument(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forManyDocuments(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return collection.find(getQuery()).sort(getSort());
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,491 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingContext;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
|
||||
import org.springframework.data.mongodb.util.json.ValueProvider;
|
||||
import org.springframework.data.mongodb.util.spel.ExpressionUtils;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* A common delegate for {@link ReferenceResolver} implementations to resolve a reference to one/many target documents
|
||||
* that are converted to entities.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public final class ReferenceLookupDelegate {
|
||||
|
||||
private static final Document NO_RESULTS_PREDICATE = new Document("_id", new Document("$exists", false));
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final SpELContext spELContext;
|
||||
private final ParameterBindingDocumentCodec codec;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReferenceLookupDelegate}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param spELContext must not be {@literal null}.
|
||||
*/
|
||||
public ReferenceLookupDelegate(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
SpELContext spELContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null");
|
||||
Assert.notNull(spELContext, "SpELContext must not be null");
|
||||
|
||||
this.mappingContext = mappingContext;
|
||||
this.spELContext = spELContext;
|
||||
this.codec = new ParameterBindingDocumentCodec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the reference expressed by the given property.
|
||||
*
|
||||
* @param property the reference defining property. Must not be {@literal null}. THe
|
||||
* @param source the source value identifying to the referenced entity. Must not be {@literal null}.
|
||||
* @param lookupFunction to execute a lookup query. Must not be {@literal null}.
|
||||
* @param entityReader the callback to convert raw source values into actual domain types. Must not be
|
||||
* {@literal null}.
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction,
|
||||
MongoEntityReader entityReader) {
|
||||
|
||||
Object value = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource()
|
||||
: source;
|
||||
|
||||
DocumentReferenceQuery filter = computeFilter(property, source, spELContext);
|
||||
ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext);
|
||||
|
||||
Iterable<Document> result = lookupFunction.apply(filter, referenceCollection);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return entityReader.read(result, property.getTypeInformation());
|
||||
}
|
||||
|
||||
if (!result.iterator().hasNext()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Object resultValue = result.iterator().next();
|
||||
return resultValue != null ? entityReader.read(resultValue, property.getTypeInformation()) : null;
|
||||
}
|
||||
|
||||
/**
 * Determine the database/collection to query for the given reference value. Precedence: an explicit
 * {@link DBRef} wins, then {@code db}/{@code collection} entries of a {@link Document} pointer (possibly
 * overridden by the {@link DocumentReference} annotation attributes), falling back to the collection mapped
 * for the association target type.
 *
 * @param property the reference defining property.
 * @param value the (already unwrapped) pointer value; may be a collection of pointers.
 * @param spELContext context for evaluating potential SpEL expressions in annotation attributes.
 * @return never {@literal null}.
 */
private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value,
		SpELContext spELContext) {

	// Use the first value as a reference for others in case of collection like
	if (value instanceof Iterable) {

		Iterator<?> iterator = ((Iterable<?>) value).iterator();
		// empty Document stands in for "no pointer" so the fallbacks below apply
		value = iterator.hasNext() ? iterator.next() : new Document();
	}

	// handle DBRef value
	if (value instanceof DBRef) {
		return ReferenceCollection.fromDBRef((DBRef) value);
	}

	// default collection derived from the mapped entity of the association target type
	String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection();

	if (value instanceof Document) {

		Document documentPointer = (Document) value;

		if (property.isDocumentReference()) {

			ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
			DocumentReference documentReference = property.getDocumentReference();

			// annotation attributes (may contain SpEL) take precedence; pointer document fields are the fallback
			String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext,
					() -> documentPointer.get("db", String.class));
			String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext,
					() -> documentPointer.get("collection", collection));
			return new ReferenceCollection(targetDatabase, targetCollection);
		}

		// plain Document pointer without @DocumentReference: read db/collection straight off the document
		return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection));
	}

	if (property.isDocumentReference()) {

		ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
		DocumentReference documentReference = property.getDocumentReference();

		// no pointer document available: null database means "use the default database"
		String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null);
		String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection);

		return new ReferenceCollection(targetDatabase, targetCollection);
	}

	return new ReferenceCollection(null, collection);
}
|
||||
|
||||
/**
 * Use the given {@link ParameterBindingContext} to compute potential expressions against the value. Resolution
 * order: empty/blank values fall back to {@code defaultValue}; a non-JSON value containing a {@code ?#{...}}
 * binding marker is wrapped into a synthetic document and decoded; a JSON document is decoded as-is; a plain
 * literal is returned unchanged; anything else is evaluated as a SpEL expression.
 *
 * @param value the raw annotation attribute value; may be {@literal null} or empty, in which case
 *          {@code defaultValue} supplies the result.
 * @param bindingContext must not be {@literal null}.
 * @param defaultValue supplier for the fallback value; invoked when the value is absent or evaluates to
 *          {@literal null}.
 * @param <T> the expected result type (unchecked cast from the decoded/evaluated value).
 * @return can be {@literal null}.
 */
@SuppressWarnings("unchecked")
private <T> T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier<T> defaultValue) {

	if (!StringUtils.hasText(value)) {
		return defaultValue.get();
	}

	// parameter binding requires a document, since we do not have one, construct it.
	if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) {
		String s = "{ 'target-value' : " + value + "}";
		T evaluated = (T) codec.decode(s, bindingContext).get("target-value");
		return evaluated != null ? evaluated : defaultValue.get();
	}

	if (BsonUtils.isJsonDocument(value)) {
		return (T) codec.decode(value, bindingContext);
	}

	// plain literal (no SpEL, no expression marker) — use verbatim
	if (!value.startsWith("#") && ExpressionUtils.detectExpression(value) == null) {
		return (T) value;
	}

	T evaluated = (T) bindingContext.evaluateExpression(value);
	return evaluated != null ? evaluated : defaultValue.get();
}
|
||||
|
||||
ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source));
|
||||
|
||||
return new ParameterBindingContext(valueProvider, spELContext.getParser(),
|
||||
() -> evaluationContextFor(property, source, spELContext));
|
||||
}
|
||||
|
||||
ValueProvider valueProviderFor(Object source) {
|
||||
|
||||
return (index) -> {
|
||||
if (source instanceof Document) {
|
||||
return Streamable.of(((Document) source).values()).toList().get(index);
|
||||
}
|
||||
return source;
|
||||
};
|
||||
}
|
||||
|
||||
EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
Object target = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource()
|
||||
: source;
|
||||
|
||||
if (target == null) {
|
||||
target = new Document();
|
||||
}
|
||||
|
||||
EvaluationContext ctx = spELContext.getEvaluationContext(target);
|
||||
ctx.setVariable("target", target);
|
||||
ctx.setVariable("self", DocumentReferenceSource.getSelf(source));
|
||||
ctx.setVariable(property.getName(), target);
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
/**
 * Compute the query to retrieve linked documents. Dispatches on the property shape: collection-like properties
 * produce an {@code $or} over the per-element lookups, map properties additionally keep a key-to-filter mapping
 * to restore map order, everything else becomes a single-document lookup.
 *
 * @param property must not be {@literal null}.
 * @param source must not be {@literal null}.
 * @param spELContext must not be {@literal null}.
 * @return never {@literal null}.
 */
@SuppressWarnings("unchecked")
DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) {

	// properties without @DocumentReference fall back to the emulated defaults (lookup by _id)
	DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference()
			: ReferenceEmulatingDocumentReference.INSTANCE;

	String lookup = documentReference.lookup();

	Object value = DocumentReferenceSource.getTargetSource(source);

	Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext),
			Document::new);

	if (property.isCollectionLike() && (value instanceof Collection || value == null)) {

		// no pointer collection: decode the lookup once against the full source
		if (value == null) {
			return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)),
					sort);
		}

		Collection<Object> objects = (Collection<Object>) value;

		// empty pointer collection must match nothing — use the _id-does-not-exist predicate
		if (objects.isEmpty()) {
			return new ListDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort);
		}

		// one decoded lookup per pointer, combined via $or
		List<Document> ors = new ArrayList<>(objects.size());
		for (Object entry : objects) {

			Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext));
			ors.add(decoded);
		}

		return new ListDocumentReferenceQuery(new Document("$or", ors), sort);
	}

	if (property.isMap() && value instanceof Map) {

		Set<Entry<Object, Object>> entries = ((Map<Object, Object>) value).entrySet();
		if (entries.isEmpty()) {
			return new MapDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort, Collections.emptyMap());
		}

		// LinkedHashMap preserves key order so the original map order can be restored later
		Map<Object, Document> filterMap = new LinkedHashMap<>(entries.size());

		for (Entry<Object, Object> entry : entries) {

			Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext));
			filterMap.put(entry.getKey(), decoded);
		}

		return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap);
	}

	return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort);
}
|
||||
|
||||
/**
 * {@link DocumentReference} stand-in used for properties that are not annotated with
 * {@code @DocumentReference}. Supplies the defaults: lookup by {@literal _id} against the mapped target
 * collection, no database/collection/sort override, eager loading.
 */
enum ReferenceEmulatingDocumentReference implements DocumentReference {

	INSTANCE;

	@Override
	public Class<? extends Annotation> annotationType() {
		return DocumentReference.class;
	}

	@Override
	public String db() {
		return ""; // empty — use the default database
	}

	@Override
	public String collection() {
		return ""; // empty — use the collection mapped for the target type
	}

	@Override
	public String lookup() {
		// match the referenced document by its identifier (the raw pointer value)
		return "{ '_id' : ?#{#target} }";
	}

	@Override
	public String sort() {
		return ""; // no sort override
	}

	@Override
	public boolean lazy() {
		return false;
	}
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation fetching a single {@link Document}.
|
||||
*/
|
||||
static class SingleDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public SingleDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
 * {@link Map} structure. Restores the original map order by matching individual query documents against the actual
 * values.
 */
static class MapDocumentReferenceQuery implements DocumentReferenceQuery {

	private final Document query;
	private final Document sort;
	// insertion-ordered map key -> decoded per-entry filter, used to rebuild the map in original order
	private final Map<Object, Document> filterOrderMap;

	public MapDocumentReferenceQuery(Document query, Document sort, Map<Object, Document> filterOrderMap) {

		this.query = query;
		this.sort = sort;
		this.filterOrderMap = filterOrderMap;
	}

	@Override
	public Bson getQuery() {
		return query;
	}

	@Override
	public Bson getSort() {
		return sort;
	}

	/**
	 * Rebuild a single {@link Document} keyed like the original map: for each map key, pick the first fetched
	 * document containing all entries of that key's filter ({@literal null} when nothing matched).
	 */
	@Override
	public Iterable<Document> restoreOrder(Iterable<Document> documents) {

		Map<String, Object> targetMap = new LinkedHashMap<>();
		List<Document> collected = documents instanceof List ? (List<Document>) documents
				: Streamable.of(documents).toList();

		for (Entry<Object, Document> filterMapping : filterOrderMap.entrySet()) {

			// a document "matches" a filter when it contains every key/value pair of that filter
			Optional<Document> first = collected.stream()
					.filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst();

			targetMap.put(filterMapping.getKey().toString(), first.orElse(null));
		}
		return Collections.singleton(new Document(targetMap));
	}
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
|
||||
* {@link Collection} like structure. Restores the original order by matching individual query documents against the
|
||||
* actual values.
|
||||
*/
|
||||
static class ListDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public ListDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
|
||||
List<Document> target = documents instanceof List ? (List<Document>) documents
|
||||
: Streamable.of(documents).toList();
|
||||
|
||||
if (!sort.isEmpty() || !query.containsKey("$or")) {
|
||||
return target;
|
||||
}
|
||||
|
||||
List<Document> ors = query.get("$or", List.class);
|
||||
return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public Document getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
int compareAgainstReferenceIndex(List<Document> referenceList, Document document1, Document document2) {
|
||||
|
||||
for (Document document : referenceList) {
|
||||
|
||||
Set<Entry<String, Object>> entries = document.entrySet();
|
||||
if (document1.entrySet().containsAll(entries)) {
|
||||
return -1;
|
||||
}
|
||||
if (document2.entrySet().containsAll(entries)) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
return referenceList.size();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to
 * obtain raw results.
 */
@FunctionalInterface
interface LookupFunction {

	/**
	 * Execute the lookup query against the given target database/collection and return the raw (unconverted)
	 * result documents.
	 *
	 * @param referenceQuery never {@literal null}.
	 * @param referenceCollection never {@literal null}.
	 * @return never {@literal null}.
	 */
	Iterable<Document> apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection);
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
 * The {@link ReferenceResolver} allows to load and convert linked entities.
 *
 * @author Christoph Strobl
 * @since 3.3
 */
@FunctionalInterface
public interface ReferenceResolver {

	/**
	 * Resolve the association defined via the given property from a given source value. May return a
	 * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. The resolved value is assignable to
	 * {@link PersistentProperty#getType()}.
	 *
	 * @param property the association defining property.
	 * @param source the association source value.
	 * @param referenceLookupDelegate the lookup executing component.
	 * @param entityReader conversion function capable of constructing entities from raw source.
	 * @return can be {@literal null}.
	 */
	@Nullable
	Object resolveReference(MongoPersistentProperty property, Object source,
			ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader);

	/**
	 * {@link ReferenceCollection} is a value object that contains information about the target database and collection
	 * name of an association.
	 */
	class ReferenceCollection {

		@Nullable //
		private final String database;
		private final String collection;

		/**
		 * @param database can be {@literal null} to indicate the configured default
		 *          {@link MongoDatabaseFactory#getMongoDatabase() database} should be used.
		 * @param collection the target collection name. Must not be {@literal null}.
		 */
		public ReferenceCollection(@Nullable String database, String collection) {

			Assert.hasText(collection, "Collection must not be empty or null!");

			this.database = database;
			this.collection = collection;
		}

		/**
		 * Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}.
		 *
		 * @param dbRef must not be {@literal null}.
		 * @return new instance of {@link ReferenceCollection}.
		 */
		public static ReferenceCollection fromDBRef(DBRef dbRef) {
			return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName());
		}

		/**
		 * Get the target collection name.
		 *
		 * @return never {@literal null}.
		 */
		public String getCollection() {
			return collection;
		}

		/**
		 * Get the target database name. If {@literal null} the default database should be used.
		 *
		 * @return can be {@literal null}.
		 */
		@Nullable
		public String getDatabase() {
			return database;
		}
	}

	/**
	 * Domain type conversion callback interface that allows to read the {@code source} object into a mapped object.
	 */
	@FunctionalInterface
	interface MongoEntityReader {

		/**
		 * Read values from the given source into an object defined via the given {@link TypeInformation}.
		 *
		 * @param source never {@literal null}.
		 * @param typeInformation information about the desired target type.
		 * @return never {@literal null}.
		 */
		Object read(Object source, TypeInformation<?> typeInformation);
	}
}
|
||||
@@ -21,7 +21,8 @@ import java.util.List;
|
||||
import org.springframework.data.geo.Point;
|
||||
|
||||
/**
|
||||
* {@link GeoJson} representation of {@link Point}.
|
||||
* {@link GeoJson} representation of {@link Point}. Uses {@link Point#getX()} as {@literal longitude} and
|
||||
* {@link Point#getY()} as {@literal latitude}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 1.7
|
||||
@@ -36,8 +37,8 @@ public class GeoJsonPoint extends Point implements GeoJson<List<Double>> {
|
||||
/**
|
||||
* Creates {@link GeoJsonPoint} for given coordinates.
|
||||
*
|
||||
* @param x
|
||||
* @param y
|
||||
* @param x longitude between {@literal -180} and {@literal 180} (inclusive).
|
||||
* @param y latitude between {@literal -90} and {@literal 90} (inclusive).
|
||||
*/
|
||||
public GeoJsonPoint(double x, double y) {
|
||||
super(x, y);
|
||||
@@ -45,6 +46,8 @@ public class GeoJsonPoint extends Point implements GeoJson<List<Double>> {
|
||||
|
||||
/**
|
||||
* Creates {@link GeoJsonPoint} for given {@link Point}.
|
||||
* <p>
|
||||
* {@link Point#getX()} translates to {@literal longitude}, {@link Point#getY()} to {@literal latitude}.
|
||||
*
|
||||
* @param point must not be {@literal null}.
|
||||
*/
|
||||
@@ -61,9 +64,10 @@ public class GeoJsonPoint extends Point implements GeoJson<List<Double>> {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.geo.GeoJson#getCoordinates()
|
||||
/**
|
||||
* Obtain the coordinates (x/longitude, y/latitude) array.
|
||||
*
|
||||
* @return the coordinates putting {@link #getX() x/longitude} first, and {@link #getY() y/latitude} second.
|
||||
*/
|
||||
@Override
|
||||
public List<Double> getCoordinates() {
|
||||
|
||||
@@ -29,7 +29,17 @@ import org.springframework.util.ObjectUtils;
|
||||
public final class IndexField {
|
||||
|
||||
enum Type {
|
||||
GEO, TEXT, DEFAULT, HASH;
|
||||
GEO, TEXT, DEFAULT,
|
||||
|
||||
/**
|
||||
* @since 2.2
|
||||
*/
|
||||
HASH,
|
||||
|
||||
/**
|
||||
* @since 3.3
|
||||
*/
|
||||
WILDCARD;
|
||||
}
|
||||
|
||||
private final String key;
|
||||
@@ -48,7 +58,7 @@ public final class IndexField {
|
||||
if (Type.GEO.equals(type) || Type.TEXT.equals(type)) {
|
||||
Assert.isNull(direction, "Geo/Text indexes must not have a direction!");
|
||||
} else {
|
||||
if (!Type.HASH.equals(type)) {
|
||||
if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type))) {
|
||||
Assert.notNull(direction, "Default indexes require a direction");
|
||||
}
|
||||
}
|
||||
@@ -77,6 +87,18 @@ public final class IndexField {
|
||||
return new IndexField(key, null, Type.HASH);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the
|
||||
* {@code fieldName.$**} notation.
|
||||
*
|
||||
* @param key must not be {@literal null} or empty.
|
||||
* @return new instance of {@link IndexField}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static IndexField wildcard(String key) {
|
||||
return new IndexField(key, null, Type.WILDCARD);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a geo {@link IndexField} for the given key.
|
||||
*
|
||||
@@ -142,6 +164,16 @@ public final class IndexField {
|
||||
return Type.HASH.equals(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the {@link IndexField} is contains a {@literal wildcard} expression.
|
||||
*
|
||||
* @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public boolean isWildcard() {
|
||||
return Type.WILDCARD.equals(type);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
|
||||
@@ -55,6 +55,7 @@ public class IndexInfo {
|
||||
private @Nullable Duration expireAfter;
|
||||
private @Nullable String partialFilterExpression;
|
||||
private @Nullable Document collation;
|
||||
private @Nullable Document wildcardProjection;
|
||||
|
||||
public IndexInfo(List<IndexField> indexFields, String name, boolean unique, boolean sparse, String language) {
|
||||
|
||||
@@ -99,6 +100,8 @@ public class IndexInfo {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals("hashed", value)) {
|
||||
indexFields.add(IndexField.hashed(key));
|
||||
} else if (key.endsWith("$**")) {
|
||||
indexFields.add(IndexField.wildcard(key));
|
||||
} else {
|
||||
|
||||
Double keyValue = new Double(value.toString());
|
||||
@@ -131,6 +134,10 @@ public class IndexInfo {
|
||||
info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class));
|
||||
}
|
||||
|
||||
if (sourceDocument.containsKey("wildcardProjection")) {
|
||||
info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class);
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
@@ -216,6 +223,16 @@ public class IndexInfo {
|
||||
return Optional.ofNullable(collation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get {@literal wildcardProjection} information.
|
||||
*
|
||||
* @return {@link Optional#empty() empty} if not set.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<Document> getWildcardProjection() {
|
||||
return Optional.ofNullable(wildcardProjection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the duration after which documents within the index expire.
|
||||
*
|
||||
@@ -234,6 +251,14 @@ public class IndexInfo {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isHashed);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if a wildcard index field is present.
|
||||
* @since 3.3
|
||||
*/
|
||||
public boolean isWildcard() {
|
||||
return getIndexFields().stream().anyMatch(IndexField::isWildcard);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
|
||||
@@ -303,4 +328,5 @@ public class IndexInfo {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -25,6 +25,16 @@ import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link IndexResolver} finds those {@link IndexDefinition}s to be created for a given class.
|
||||
* <p>
|
||||
* The {@link IndexResolver} considers index annotations like {@link Indexed}, {@link GeoSpatialIndexed},
|
||||
* {@link HashIndexed}, {@link TextIndexed} and {@link WildcardIndexed} on properties as well as {@link CompoundIndex}
|
||||
* and {@link WildcardIndexed} on types.
|
||||
* <p>
|
||||
* Unless specified otherwise the index name will be created out of the keys/path involved in the index. <br />
|
||||
* {@link TextIndexed} properties are collected into a single index that covers the detected fields. <br />
|
||||
* {@link java.util.Map} like structures, unless annotated with {@link WildcardIndexed}, are skipped because the
|
||||
* {@link java.util.Map.Entry#getKey() map key}, which cannot be resolved from static metadata, needs to be part of the
|
||||
* index.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Thomas Darimont
|
||||
|
||||
@@ -19,8 +19,9 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
@@ -53,7 +54,7 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class MongoPersistentEntityIndexCreator implements ApplicationListener<MappingContextEvent<?, ?>> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexCreator.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final IndexOperationsProvider indexOperationsProvider;
|
||||
@@ -207,8 +208,10 @@ public class MongoPersistentEntityIndexCreator implements ApplicationListener<Ma
|
||||
orElse(null);
|
||||
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e);
|
||||
if(LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()), e);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -27,8 +27,9 @@ import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.data.mapping.Association;
|
||||
@@ -46,6 +47,7 @@ import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.Document;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.DotPath;
|
||||
import org.springframework.data.spel.EvaluationContextProvider;
|
||||
@@ -76,7 +78,7 @@ import org.springframework.util.StringUtils;
|
||||
*/
|
||||
public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistentEntityIndexResolver.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(MongoPersistentEntityIndexResolver.class);
|
||||
private static final SpelExpressionParser PARSER = new SpelExpressionParser();
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -118,9 +120,12 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
Assert.notNull(document, () -> String
|
||||
.format("Entity %s is not a collection root. Make sure to annotate it with @Document!", root.getName()));
|
||||
|
||||
verifyWildcardIndexedProjection(root);
|
||||
|
||||
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
String collection = root.getCollection();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root));
|
||||
indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root));
|
||||
indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection));
|
||||
|
||||
root.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
|
||||
@@ -131,10 +136,32 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return indexInformation;
|
||||
}
|
||||
|
||||
private void verifyWildcardIndexedProjection(MongoPersistentEntity<?> entity) {
|
||||
|
||||
entity.doWithAll(it -> {
|
||||
|
||||
if (it.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
|
||||
WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class);
|
||||
|
||||
if (!ObjectUtils.isEmpty(indexed.wildcardProjection())) {
|
||||
|
||||
throw new MappingException(String.format(
|
||||
"WildcardIndexed.wildcardProjection cannot be used on nested paths. Offending property: %s.%s",
|
||||
entity.getName(), it.getName()));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void potentiallyAddIndexForProperty(MongoPersistentEntity<?> root, MongoPersistentProperty persistentProperty,
|
||||
List<IndexDefinitionHolder> indexes, CycleGuard guard) {
|
||||
|
||||
try {
|
||||
if (isMapWithoutWildcardIndex(persistentProperty)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty),
|
||||
persistentProperty.isUnwrapped() ? "" : persistentProperty.getFieldName(), Path.of(persistentProperty),
|
||||
@@ -147,7 +174,9 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
indexes.addAll(indexDefinitions);
|
||||
}
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
if (LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,17 +191,18 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
* @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property
|
||||
* types. Will never be {@code null}.
|
||||
*/
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass( TypeInformation<?> type, String dotPath,
|
||||
Path path, String collection, CycleGuard guard) {
|
||||
private List<IndexDefinitionHolder> resolveIndexForClass(TypeInformation<?> type, String dotPath, Path path,
|
||||
String collection, CycleGuard guard) {
|
||||
|
||||
return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard);
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> resolveIndexForEntity(MongoPersistentEntity<?> entity, String dotPath,
|
||||
Path path, String collection, CycleGuard guard) {
|
||||
private List<IndexDefinitionHolder> resolveIndexForEntity(MongoPersistentEntity<?> entity, String dotPath, Path path,
|
||||
String collection, CycleGuard guard) {
|
||||
|
||||
List<IndexDefinitionHolder> indexInformation = new ArrayList<>();
|
||||
indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity));
|
||||
indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity));
|
||||
|
||||
entity.doWithProperties((PropertyHandler<MongoPersistentProperty>) property -> this
|
||||
.guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard));
|
||||
@@ -194,17 +224,21 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
Path propertyPath = path.append(persistentProperty);
|
||||
guard.protect(persistentProperty, propertyPath);
|
||||
|
||||
if (isMapWithoutWildcardIndex(persistentProperty)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (persistentProperty.isEntity()) {
|
||||
try {
|
||||
indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), propertyDotPath.toString(),
|
||||
propertyPath, collection, guard));
|
||||
indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty),
|
||||
propertyDotPath.toString(), propertyPath, collection, guard));
|
||||
} catch (CyclicPropertyReferenceException e) {
|
||||
LOGGER.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection,
|
||||
persistentProperty);
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(),
|
||||
collection, persistentProperty);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
@@ -232,6 +266,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
if (persistentProperty.isAnnotationPresent(HashIndexed.class)) {
|
||||
indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty));
|
||||
}
|
||||
if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
indices.add(createWildcardIndexDefinition(dotPath, collection,
|
||||
persistentProperty.getRequiredAnnotation(WildcardIndexed.class),
|
||||
mappingContext.getPersistentEntity(persistentProperty)));
|
||||
}
|
||||
|
||||
return indices;
|
||||
}
|
||||
@@ -246,6 +285,18 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return createCompoundIndexDefinitions(dotPath, collection, entity);
|
||||
}
|
||||
|
||||
private List<IndexDefinitionHolder> potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection,
|
||||
MongoPersistentEntity<?> entity) {
|
||||
|
||||
if (!entity.isAnnotationPresent(WildcardIndexed.class)) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return Collections.singletonList(new IndexDefinitionHolder(dotPath,
|
||||
createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity),
|
||||
collection));
|
||||
}
|
||||
|
||||
private Collection<? extends IndexDefinitionHolder> potentiallyCreateTextIndexDefinition(
|
||||
MongoPersistentEntity<?> root, String collection) {
|
||||
|
||||
@@ -292,9 +343,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
|
||||
}
|
||||
|
||||
private void appendTextIndexInformation(DotPath dotPath, Path path,
|
||||
TextIndexDefinitionBuilder indexDefinitionBuilder, MongoPersistentEntity<?> entity,
|
||||
TextIndexIncludeOptions includeOptions, CycleGuard guard) {
|
||||
private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder,
|
||||
MongoPersistentEntity<?> entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) {
|
||||
|
||||
entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
|
||||
|
||||
@@ -307,12 +357,15 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
indexDefinitionBuilder.withLanguageOverride(persistentProperty.getFieldName());
|
||||
}
|
||||
|
||||
if (persistentProperty.isMap()) {
|
||||
return;
|
||||
}
|
||||
|
||||
TextIndexed indexed = persistentProperty.findAnnotation(TextIndexed.class);
|
||||
|
||||
if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) {
|
||||
|
||||
DotPath propertyDotPath = dotPath
|
||||
.append(persistentProperty.getFieldName());
|
||||
DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName());
|
||||
|
||||
Path propertyPath = path.append(persistentProperty);
|
||||
|
||||
@@ -406,6 +459,32 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection,
|
||||
WildcardIndexed index, @Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
WildcardIndex indexDefinition = new WildcardIndex(dotPath);
|
||||
|
||||
if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) {
|
||||
indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity));
|
||||
}
|
||||
|
||||
if (!index.useGeneratedName()) {
|
||||
indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null));
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(index.partialFilter())) {
|
||||
indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity));
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(index.collation())) {
|
||||
indexDefinition.collation(evaluateCollation(index.collation(), entity));
|
||||
} else if (entity != null && entity.hasCollation()) {
|
||||
indexDefinition.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
return new IndexDefinitionHolder(dotPath, indexDefinition, collection);
|
||||
}
|
||||
|
||||
private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString,
|
||||
PersistentEntity<?, ?> entity) {
|
||||
|
||||
@@ -510,6 +589,33 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null));
|
||||
}
|
||||
|
||||
private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity<?, ?> entity) {
|
||||
|
||||
Object result = evaluate(projectionExpression, getEvaluationContextForProperty(entity));
|
||||
|
||||
if (result instanceof org.bson.Document) {
|
||||
return (org.bson.Document) result;
|
||||
}
|
||||
|
||||
return BsonUtils.parse(projectionExpression, null);
|
||||
}
|
||||
|
||||
private Collation evaluateCollation(String collationExpression, PersistentEntity<?, ?> entity) {
|
||||
|
||||
Object result = evaluate(collationExpression, getEvaluationContextForProperty(entity));
|
||||
if (result instanceof org.bson.Document) {
|
||||
return Collation.from((org.bson.Document) result);
|
||||
}
|
||||
if (result instanceof Collation) {
|
||||
return (Collation) result;
|
||||
}
|
||||
if (result instanceof String) {
|
||||
return Collation.parse(result.toString());
|
||||
}
|
||||
throw new IllegalStateException("Cannot parse collation " + result);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given
|
||||
* {@link MongoPersistentProperty}.
|
||||
@@ -657,8 +763,8 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
propertyDotPath));
|
||||
}
|
||||
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection,
|
||||
property);
|
||||
List<IndexDefinitionHolder> indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(),
|
||||
collection, property);
|
||||
|
||||
if (!indexDefinitions.isEmpty()) {
|
||||
indexes.addAll(indexDefinitions);
|
||||
@@ -707,6 +813,10 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
return expression.getValue(evaluationContext, Object.class);
|
||||
}
|
||||
|
||||
private static boolean isMapWithoutWildcardIndex(MongoPersistentProperty property) {
|
||||
return property.isMap() && !property.isAnnotationPresent(WildcardIndexed.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link CycleGuard} holds information about properties and the paths for accessing those. This information is used
|
||||
* to detect potential cycles within the references.
|
||||
@@ -998,6 +1108,11 @@ public class MongoPersistentEntityIndexResolver implements IndexResolver {
|
||||
public org.bson.Document getIndexOptions() {
|
||||
return indexDefinition.getIndexOptions();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -23,8 +23,9 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.UncategorizedMongoDbException;
|
||||
@@ -47,7 +48,7 @@ import com.mongodb.MongoException;
|
||||
*/
|
||||
public class ReactiveMongoPersistentEntityIndexCreator {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoPersistentEntityIndexCreator.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(ReactiveMongoPersistentEntityIndexCreator.class);
|
||||
|
||||
private final Map<Class<?>, Boolean> classesSeen = new ConcurrentHashMap<Class<?>, Boolean>();
|
||||
private final MongoMappingContext mappingContext;
|
||||
@@ -174,9 +175,11 @@ public class ReactiveMongoPersistentEntityIndexCreator {
|
||||
.filter(indexInfo -> ObjectUtils.nullSafeEquals(indexNameToLookUp, indexInfo.getName())) //
|
||||
.next() //
|
||||
.doOnError(e -> {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()),
|
||||
e);
|
||||
if(LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
String.format("Failed to load index information for collection '%s'.", indexDefinition.getCollection()),
|
||||
e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,198 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the
|
||||
* {@code $**" : 1} pattern on a root object (the one typically carrying the
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use
|
||||
* {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific
|
||||
* paths for in-/exclusion.
|
||||
* <br />
|
||||
* It can also be used to define an index on a specific field path and its subfields, e.g.
|
||||
* {@code "path.to.field.$**" : 1}. <br />
|
||||
* Note that {@literal wildcardProjections} are not allowed in this case.
|
||||
* <br />
|
||||
* <strong>LIMITATIONS</strong><br />
|
||||
* <ul>
|
||||
* <li>{@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.</li>
|
||||
* <li>Keys used for sharding must not be included</li>
|
||||
* <li>Cannot be used to generate any type of geo index.</li>
|
||||
* </ul>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see <a href= "https://docs.mongodb.com/manual/core/index-wildcard/">MongoDB Reference Documentation: Wildcard
|
||||
* Indexes/</a>
|
||||
* @since 3.3
|
||||
*/
|
||||
public class WildcardIndex extends Index {
|
||||
|
||||
private @Nullable String fieldName;
|
||||
private final Map<String, Object> wildcardProjection = new LinkedHashMap<>();
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link WildcardIndex} using {@code $**}.
|
||||
*/
|
||||
public WildcardIndex() {}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the
|
||||
* index will be considered a root one using {@code $**}. <br />
|
||||
* <strong>NOTE</strong> {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)}
|
||||
* can only be used for top level index definitions having an {@literal empty} or {@literal null} path.
|
||||
*
|
||||
* @param path can be {@literal null}. If {@literal null} all fields will be indexed.
|
||||
*/
|
||||
public WildcardIndex(@Nullable String path) {
|
||||
this.fieldName = path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Include the {@code _id} field in {@literal wildcardProjection}.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex includeId() {
|
||||
|
||||
wildcardProjection.put("_id", 1);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the index name to use.
|
||||
*
|
||||
* @param name
|
||||
* @return this.
|
||||
*/
|
||||
@Override
|
||||
public WildcardIndex named(String name) {
|
||||
|
||||
super.named(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unique option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index unique() {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'unique'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(long seconds) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(long value, TimeUnit timeUnit) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* ttl option is not supported.
|
||||
*
|
||||
* @throws UnsupportedOperationException not supported for wildcard indexes.
|
||||
*/
|
||||
@Override
|
||||
public Index expire(Duration duration) {
|
||||
throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add fields to be included from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param paths must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjectionInclude(String... paths) {
|
||||
|
||||
for (String path : paths) {
|
||||
wildcardProjection.put(path, 1);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add fields to be excluded from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param paths must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjectionExclude(String... paths) {
|
||||
|
||||
for (String path : paths) {
|
||||
wildcardProjection.put(path, 0);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the fields to be in-/excluded from indexing via {@code wildcardProjection}. <br />
|
||||
* This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes.
|
||||
*
|
||||
* @param includeExclude must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WildcardIndex wildcardProjection(Map<String, Object> includeExclude) {
|
||||
|
||||
wildcardProjection.putAll(includeExclude);
|
||||
return this;
|
||||
}
|
||||
|
||||
private String getTargetFieldName() {
|
||||
return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getIndexKeys() {
|
||||
return new Document(getTargetFieldName(), 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getIndexOptions() {
|
||||
|
||||
Document options = new Document(super.getIndexOptions());
|
||||
if (!CollectionUtils.isEmpty(wildcardProjection)) {
|
||||
options.put("wildcardProjection", new Document(wildcardProjection));
|
||||
}
|
||||
return options;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.index;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* Annotation for an entity or property that should be used as key for a
|
||||
* <a href="https://docs.mongodb.com/manual/core/index-wildcard/">Wildcard Index</a>. <br />
|
||||
* If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Document} annotation) will advise the index creator to create a
|
||||
* wildcard index for it.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* @Document
|
||||
* @WildcardIndexed
|
||||
* public class Product {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* db.product.createIndex({ "$**" : 1 } , {})
|
||||
* </pre>
|
||||
*
|
||||
* {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index.
|
||||
*
|
||||
* <pre class="code">
|
||||
*
|
||||
* @Document
|
||||
* @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
|
||||
* public class User {
|
||||
* private @Id String id;
|
||||
* private UserMetadata userMetadata;
|
||||
* }
|
||||
*
|
||||
*
|
||||
* db.user.createIndex(
|
||||
* { "$**" : 1 },
|
||||
* { "wildcardProjection" :
|
||||
* { "userMetadata.age" : 0 }
|
||||
* }
|
||||
* )
|
||||
* </pre>
|
||||
*
|
||||
* Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that
|
||||
* {@literal wildcardProjection} is not allowed on nested paths.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* public class User {
|
||||
*
|
||||
* private @Id String id;
|
||||
*
|
||||
* @WildcardIndexed
|
||||
* private UserMetadata userMetadata;
|
||||
* }
|
||||
*
|
||||
*
|
||||
* db.user.createIndex({ "userMetadata.$**" : 1 }, {})
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@Documented
|
||||
@Target({ ElementType.TYPE, ElementType.FIELD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
public @interface WildcardIndexed {
|
||||
|
||||
/**
|
||||
* Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template
|
||||
* expression}. <br />
|
||||
* <br />
|
||||
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the
|
||||
* provided name will be prefixed with the path leading to the entity.
|
||||
*
|
||||
* @return empty by default.
|
||||
*/
|
||||
String name() default "";
|
||||
|
||||
/**
|
||||
* If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults
|
||||
* to {@literal false}.
|
||||
*
|
||||
* @return {@literal false} by default.
|
||||
*/
|
||||
boolean useGeneratedName() default false;
|
||||
|
||||
/**
|
||||
* Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}. <br />
|
||||
*
|
||||
* @return empty by default.
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/index-partial/">https://docs.mongodb.com/manual/core/index-partial/</a>
|
||||
*/
|
||||
String partialFilter() default "";
|
||||
|
||||
/**
|
||||
* Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) prasable} String.
|
||||
* <br />
|
||||
* <strong>NOTE: </strong>Can only be applied on root level documents.
|
||||
*
|
||||
* @return empty by default.
|
||||
*/
|
||||
String wildcardProjection() default "";
|
||||
|
||||
/**
|
||||
* Defines the collation to apply.
|
||||
*
|
||||
* @return an empty {@link String} by default.
|
||||
*/
|
||||
String collation() default "";
|
||||
}
|
||||
@@ -17,8 +17,12 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.data.annotation.Id;
|
||||
@@ -28,6 +32,9 @@ import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PropertyHandler;
|
||||
import org.springframework.data.mapping.model.BasicPersistentEntity;
|
||||
import org.springframework.data.mongodb.MongoCollectionUtils;
|
||||
import org.springframework.data.mongodb.util.encryption.EncryptionUtils;
|
||||
import org.springframework.data.spel.ExpressionDependencies;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.expression.Expression;
|
||||
@@ -212,6 +219,11 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
return super.getEvaluationContext(rootObject);
|
||||
}
|
||||
|
||||
@Override
|
||||
public EvaluationContext getEvaluationContext(Object rootObject, ExpressionDependencies dependencies) {
|
||||
return super.getEvaluationContext(rootObject, dependencies);
|
||||
}
|
||||
|
||||
private void verifyFieldUniqueness() {
|
||||
|
||||
AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler();
|
||||
@@ -360,6 +372,32 @@ public class BasicMongoPersistentEntity<T> extends BasicPersistentEntity<T, Mong
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> getEncryptionKeyIds() {
|
||||
|
||||
Encrypted encrypted = findAnnotation(Encrypted.class);
|
||||
if (encrypted == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (ObjectUtils.isEmpty(encrypted.keyId())) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
Lazy<EvaluationContext> evaluationContext = Lazy.of(() -> {
|
||||
|
||||
EvaluationContext ctx = getEvaluationContext(null);
|
||||
ctx.setVariable("target", getType().getSimpleName());
|
||||
return ctx;
|
||||
});
|
||||
|
||||
List<Object> target = new ArrayList<>();
|
||||
for (String keyId : encrypted.keyId()) {
|
||||
target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 1.6
|
||||
|
||||
@@ -16,13 +16,17 @@
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.annotation.Id;
|
||||
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty;
|
||||
@@ -30,7 +34,12 @@ import org.springframework.data.mapping.model.FieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.Property;
|
||||
import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy;
|
||||
import org.springframework.data.mapping.model.SimpleTypeHolder;
|
||||
import org.springframework.data.mongodb.util.encryption.EncryptionUtils;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.expression.spel.support.StandardEvaluationContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
@@ -41,13 +50,14 @@ import org.springframework.util.StringUtils;
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty<MongoPersistentProperty>
|
||||
implements MongoPersistentProperty {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class);
|
||||
private static final Log LOG = LogFactory.getLog(BasicMongoPersistentProperty.class);
|
||||
|
||||
private static final String ID_FIELD_NAME = "_id";
|
||||
public static final String ID_FIELD_NAME = "_id";
|
||||
private static final String LANGUAGE_FIELD_NAME = "language";
|
||||
private static final Set<Class<?>> SUPPORTED_ID_TYPES = new HashSet<Class<?>>();
|
||||
private static final Set<String> SUPPORTED_ID_PROPERTY_NAMES = new HashSet<String>();
|
||||
@@ -83,9 +93,11 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
|
||||
String annotatedName = getAnnotatedFieldName();
|
||||
if (!ID_FIELD_NAME.equals(annotatedName)) {
|
||||
LOG.warn(
|
||||
"Customizing field name for id property '{}.{}' is not allowed! Custom name ('{}') will not be considered!",
|
||||
owner.getName(), getName(), annotatedName);
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(
|
||||
"Customizing field name for id property '%s.%s' is not allowed! Custom name ('%s') will not be considered!",
|
||||
owner.getName(), getName(), annotatedName));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -114,7 +126,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
*/
|
||||
@Override
|
||||
public boolean isExplicitIdProperty() {
|
||||
return isAnnotationPresent(Id.class);
|
||||
return super.isIdProperty();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -189,7 +201,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
* {@link org.springframework.data.mongodb.core.mapping.Field#value()} present.
|
||||
* @since 1.7
|
||||
*/
|
||||
protected boolean hasExplicitFieldName() {
|
||||
public boolean hasExplicitFieldName() {
|
||||
return StringUtils.hasText(getAnnotatedFieldName());
|
||||
}
|
||||
|
||||
@@ -214,6 +226,19 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return annotation != null ? annotation.order() : Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#skipNullValues()
|
||||
*/
|
||||
@Override
|
||||
public boolean writeNullValues() {
|
||||
|
||||
org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation(
|
||||
org.springframework.data.mongodb.core.mapping.Field.class);
|
||||
|
||||
return annotation != null && annotation.write() == Field.Write.ALWAYS;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation()
|
||||
@@ -231,6 +256,15 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return isAnnotationPresent(DBRef.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDocumentReference()
|
||||
*/
|
||||
@Override
|
||||
public boolean isDocumentReference() {
|
||||
return isAnnotationPresent(DocumentReference.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef()
|
||||
@@ -240,6 +274,16 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
return findAnnotation(DBRef.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDocumentReference()
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public DocumentReference getDocumentReference() {
|
||||
return findAnnotation(DocumentReference.class);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty()
|
||||
@@ -266,4 +310,44 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope
|
||||
public boolean isTextScoreProperty() {
|
||||
return isAnnotationPresent(TextScore.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link EvaluationContext} for a specific root object.
|
||||
*
|
||||
* @param rootObject can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public EvaluationContext getEvaluationContext(@Nullable Object rootObject) {
|
||||
|
||||
if (getOwner() instanceof BasicMongoPersistentEntity) {
|
||||
return ((BasicMongoPersistentEntity) getOwner()).getEvaluationContext(rootObject);
|
||||
}
|
||||
return rootObject != null ? new StandardEvaluationContext(rootObject) : new StandardEvaluationContext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> getEncryptionKeyIds() {
|
||||
|
||||
Encrypted encrypted = findAnnotation(Encrypted.class);
|
||||
if (encrypted == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (ObjectUtils.isEmpty(encrypted.keyId())) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
Lazy<EvaluationContext> evaluationContext = Lazy.of(() -> {
|
||||
EvaluationContext ctx = getEvaluationContext(null);
|
||||
ctx.setVariable("target", getOwner().getType().getSimpleName() + "." + getName());
|
||||
return ctx;
|
||||
});
|
||||
|
||||
List<Object> target = new ArrayList<>();
|
||||
for (String keyId : encrypted.keyId()) {
|
||||
target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext));
|
||||
}
|
||||
return target;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
private boolean dbRefResolved;
|
||||
private @Nullable DBRef dbref;
|
||||
private @Nullable String fieldName;
|
||||
private @Nullable Boolean writeNullValues;
|
||||
private @Nullable Class<?> fieldType;
|
||||
private @Nullable Boolean usePropertyAccess;
|
||||
private @Nullable Boolean isTransient;
|
||||
@@ -90,6 +91,20 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#skipNullValues()
|
||||
*/
|
||||
@Override
|
||||
public boolean writeNullValues() {
|
||||
|
||||
if (this.writeNullValues == null) {
|
||||
this.writeNullValues = super.writeNullValues();
|
||||
}
|
||||
|
||||
return this.writeNullValues;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldType()
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
/**
|
||||
* A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@FunctionalInterface
|
||||
public interface DocumentPointer<T> {
|
||||
|
||||
/**
|
||||
* The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or
|
||||
* a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key,
|
||||
* etc.
|
||||
*
|
||||
* @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}.
|
||||
*/
|
||||
T getPointer();
|
||||
}
|
||||
@@ -0,0 +1,132 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
|
||||
/**
|
||||
* A {@link DocumentReference} allows referencing entities in MongoDB using a flexible schema. While the goal is the
|
||||
* same as when using {@link DBRef}, the store representation is different. The reference can be anything, a single
|
||||
* value, an entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the
|
||||
* mapping layer will use the referenced entities {@literal id} value for storage and retrieval.
|
||||
*
|
||||
* <pre class="code">
|
||||
* public class Account {
|
||||
* private String id;
|
||||
* private Float total;
|
||||
* }
|
||||
*
|
||||
* public class Person {
|
||||
* private String id;
|
||||
* @DocumentReference
|
||||
* private List<Account> accounts;
|
||||
* }
|
||||
*
|
||||
* Account account = ...
|
||||
*
|
||||
* mongoTemplate.insert(account);
|
||||
*
|
||||
* template.update(Person.class)
|
||||
* .matching(where("id").is(...))
|
||||
* .apply(new Update().push("accounts").value(account))
|
||||
* .first();
|
||||
* </pre>
|
||||
*
|
||||
* {@link #lookup()} allows defining a query filter that is independent from the {@literal _id} field and in combination
|
||||
* with {@link org.springframework.data.convert.WritingConverter writing converters} offers a flexible way of defining
|
||||
* references between entities.
|
||||
*
|
||||
* <pre class="code">
|
||||
* public class Book {
|
||||
* private ObjectId id;
|
||||
* private String title;
|
||||
*
|
||||
* @Field("publisher_ac") @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
|
||||
* }
|
||||
*
|
||||
* public class Publisher {
|
||||
*
|
||||
* private ObjectId id;
|
||||
* private String acronym;
|
||||
* private String name;
|
||||
*
|
||||
* @DocumentReference(lazy = true) private List<Book> books;
|
||||
* }
|
||||
*
|
||||
* @WritingConverter
|
||||
* public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
|
||||
*
|
||||
* public DocumentPointer<String> convert(Publisher source) {
|
||||
* return () -> source.getAcronym();
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/database-references/#std-label-document-references">MongoDB
|
||||
* Reference Documentation</a>
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.FIELD })
|
||||
@Reference
|
||||
public @interface DocumentReference {
|
||||
|
||||
/**
|
||||
* The database the referenced entity resides in. Uses the default database provided by
|
||||
* {@link org.springframework.data.mongodb.MongoDatabaseFactory} if empty.
|
||||
*
|
||||
* @see MongoDatabaseFactory#getMongoDatabase()
|
||||
* @see MongoDatabaseFactory#getMongoDatabase(String)
|
||||
*/
|
||||
String db() default "";
|
||||
|
||||
/**
|
||||
* The collection the referenced entity resides in. Defaults to the collection of the referenced entity type.
|
||||
*
|
||||
* @see MongoPersistentEntity#getCollection()
|
||||
*/
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
* The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property the
|
||||
* individual lookups are combined via an {@code $or} operator. {@code target} points to the source value (or
|
||||
* document) stored at the reference property. Properties of {@code target} can be used to define the reference query.
|
||||
*
|
||||
* @return an {@literal _id} based lookup.
|
||||
*/
|
||||
String lookup() default "{ '_id' : ?#{#target} }";
|
||||
|
||||
/**
|
||||
* A specific sort.
|
||||
*/
|
||||
String sort() default "";
|
||||
|
||||
/**
|
||||
* Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}.
|
||||
*
|
||||
* @return {@literal false} by default.
|
||||
*/
|
||||
boolean lazy() default false;
|
||||
}
|
||||
@@ -0,0 +1,112 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
/**
|
||||
* {@link Encrypted} provides data required for MongoDB Client Side Field Level Encryption that is applied during schema
|
||||
* resolution. It can be applied on top level (typically those types annotated with {@link Document} to provide the
|
||||
* {@literal encryptMetadata}.
|
||||
*
|
||||
* <pre class="code">
|
||||
* @Document
|
||||
* @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==")
|
||||
* public class Patient {
|
||||
* private ObjectId id;
|
||||
* private String name;
|
||||
*
|
||||
* @Field("publisher_ac")
|
||||
* @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
|
||||
* }
|
||||
*
|
||||
* "encryptMetadata": {
|
||||
* "keyId": [
|
||||
* {
|
||||
* "$binary": {
|
||||
* "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
|
||||
* "subType": "04"
|
||||
* }
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* <br />
|
||||
* On property level it is used for deriving field specific {@literal encrypt} settings.
|
||||
*
|
||||
* <pre class="code">
|
||||
* public class Patient {
|
||||
* private ObjectId id;
|
||||
* private String name;
|
||||
*
|
||||
* @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
|
||||
* private String ssn;
|
||||
* }
|
||||
*
|
||||
* "ssn" : {
|
||||
* "encrypt": {
|
||||
* "keyId": [
|
||||
* {
|
||||
* "$binary": {
|
||||
* "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
|
||||
* "subType": "04"
|
||||
* }
|
||||
* }
|
||||
* ],
|
||||
* "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
|
||||
* "bsonType" : "string"
|
||||
* }
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.TYPE, ElementType.FIELD })
|
||||
public @interface Encrypted {
|
||||
|
||||
/**
|
||||
* Get the {@code keyId} to use. The value must resolve to either the UUID representation of the key or a base64
|
||||
* encoded value representing the UUID value.
|
||||
* <br />
|
||||
* On {@link ElementType#TYPE} level the {@link #keyId()} can be left empty if explicitly set for fields. <br />
|
||||
* On {@link ElementType#FIELD} level the {@link #keyId()} can be left empty if inherited from
|
||||
* {@literal encryptMetadata}.
|
||||
*
|
||||
* @return the key id to use. May contain a parsable {@link org.springframework.expression.Expression expression}. In
|
||||
* this case the {@code #target} variable will hold the target element name.
|
||||
*/
|
||||
String[] keyId() default {};
|
||||
|
||||
/**
|
||||
* Set the algorithm to use.
|
||||
* <br />
|
||||
* On {@link ElementType#TYPE} level the {@link #algorithm()} can be left empty if explicitly set for fields. <br />
|
||||
* On {@link ElementType#FIELD} level the {@link #algorithm()} can be left empty if inherited from
|
||||
* {@literal encryptMetadata}.
|
||||
*
|
||||
* @return the encryption algorithm.
|
||||
* @see org.springframework.data.mongodb.core.EncryptionAlgorithms
|
||||
*/
|
||||
String algorithm() default "";
|
||||
}
|
||||
@@ -28,6 +28,7 @@ import org.springframework.core.annotation.AliasFor;
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
@Documented
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@@ -65,4 +66,34 @@ public @interface Field {
|
||||
* @since 2.2
|
||||
*/
|
||||
FieldType targetType() default FieldType.IMPLICIT;
|
||||
|
||||
/**
|
||||
* Write rules when to include a property value upon conversion. If set to {@link Write#NON_NULL} (default)
|
||||
* {@literal null} values are not written to the target {@code Document}. Setting the value to {@link Write#ALWAYS}
|
||||
* explicitly adds an entry for the given field holding {@literal null} as a value {@code 'fieldName' : null }.
|
||||
* <br />
|
||||
* <strong>NOTE</strong>Setting the value to {@link Write#ALWAYS} may lead to increased document size.
|
||||
*
|
||||
* @return {@link Write#NON_NULL} by default.
|
||||
* @since 3.3
|
||||
*/
|
||||
Write write() default Write.NON_NULL;
|
||||
|
||||
/**
|
||||
* Enumeration of write strategies to define when a property is included for write conversion.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
enum Write {
|
||||
|
||||
/**
|
||||
* Value that indicates that property is to be always included, independent of value of the property.
|
||||
*/
|
||||
ALWAYS,
|
||||
|
||||
/**
|
||||
* Value that indicates that only properties with non-{@literal null} values are to be included.
|
||||
*/
|
||||
NON_NULL
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +46,9 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
private FieldNamingStrategy fieldNamingStrategy = DEFAULT_NAMING_STRATEGY;
|
||||
private boolean autoIndexCreation = false;
|
||||
|
||||
@Nullable
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoMappingContext}.
|
||||
*/
|
||||
@@ -103,6 +106,8 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
*/
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
super.setApplicationContext(applicationContext);
|
||||
}
|
||||
|
||||
@@ -145,4 +150,5 @@ public class MongoMappingContext extends AbstractMappingContext<MongoPersistentE
|
||||
|
||||
return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.model.MutablePersistentEntity;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -102,4 +104,11 @@ public interface MongoPersistentEntity<T> extends MutablePersistentEntity<T, Mon
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified.
|
||||
* {@literal null} no {@link Encrypted} annotation found.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
Collection<Object> getEncryptionKeyIds();
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Id;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
@@ -28,6 +30,7 @@ import org.springframework.lang.Nullable;
|
||||
* @author Patryk Wasik
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public interface MongoPersistentProperty extends PersistentProperty<MongoPersistentProperty> {
|
||||
|
||||
@@ -38,6 +41,13 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
String getFieldName();
|
||||
|
||||
/**
|
||||
* Returns whether the property uses an annotated field name through {@link Field}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
boolean hasExplicitFieldName();
|
||||
|
||||
/**
|
||||
* Returns the {@link Class Java FieldType} of the field a property is persisted to.
|
||||
*
|
||||
@@ -54,6 +64,15 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
int getFieldOrder();
|
||||
|
||||
/**
|
||||
* Returns whether the property should be written to the database if its value is {@literal null}.
|
||||
*
|
||||
* @return
|
||||
* @since 3.3
|
||||
* @see Field.Write
|
||||
*/
|
||||
boolean writeNullValues();
|
||||
|
||||
/**
|
||||
* Returns whether the property is a {@link com.mongodb.DBRef}. If this returns {@literal true} you can expect
|
||||
* {@link #getDBRef()} to return an non-{@literal null} value.
|
||||
@@ -62,6 +81,15 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
*/
|
||||
boolean isDbReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is a {@link DocumentReference}. If this returns {@literal true} you can expect
|
||||
* {@link #getDocumentReference()} to return an non-{@literal null} value.
|
||||
*
|
||||
* @return
|
||||
* @since 3.3
|
||||
*/
|
||||
boolean isDocumentReference();
|
||||
|
||||
/**
|
||||
* Returns whether the property is explicitly marked as an identifier property of the owning {@link PersistentEntity}.
|
||||
* A property is an explicit id property if it is annotated with @see {@link Id}.
|
||||
@@ -105,6 +133,16 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
@Nullable
|
||||
DBRef getDBRef();
|
||||
|
||||
/**
|
||||
* Returns the {@link DocumentReference} if the property is a reference.
|
||||
*
|
||||
* @see #isDocumentReference()
|
||||
* @return {@literal null} if not present.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
DocumentReference getDocumentReference();
|
||||
|
||||
/**
|
||||
* Returns whether property access shall be used for reading the property value. This means it will use the getter
|
||||
* instead of field access.
|
||||
@@ -131,6 +169,13 @@ public interface MongoPersistentProperty extends PersistentProperty<MongoPersist
|
||||
return isEntity() && isAnnotationPresent(Unwrapped.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified.
|
||||
* {@literal null} no {@link Encrypted} annotation found.
|
||||
* @since 3.3
|
||||
*/
|
||||
Collection<Object> getEncryptionKeyIds();
|
||||
|
||||
/**
|
||||
* Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name.
|
||||
*
|
||||
|
||||
@@ -0,0 +1,92 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.springframework.data.util.Predicates;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* Utility to translate a {@link MongoPersistentProperty} into a corresponding property from a different
|
||||
* {@link MongoPersistentEntity} by looking it up by name.
|
||||
* <p>
|
||||
* Mainly used within the framework.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.4
|
||||
*/
|
||||
public class PersistentPropertyTranslator {
|
||||
|
||||
/**
|
||||
* Translate a {@link MongoPersistentProperty} into a corresponding property from a different
|
||||
* {@link MongoPersistentEntity}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @return the translated property. Can be the original {@code property}.
|
||||
*/
|
||||
public MongoPersistentProperty translate(MongoPersistentProperty property) {
|
||||
return property;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link PersistentPropertyTranslator}.
|
||||
*
|
||||
* @param targetEntity must not be {@literal null}.
|
||||
* @return the property translator to use.
|
||||
*/
|
||||
public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity<?> targetEntity) {
|
||||
return create(targetEntity, Predicates.isTrue());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link PersistentPropertyTranslator} accepting a {@link Predicate filter predicate} whether the
|
||||
* translation should happen at all.
|
||||
*
|
||||
* @param targetEntity must not be {@literal null}.
|
||||
* @param translationFilter must not be {@literal null}.
|
||||
* @return the property translator to use.
|
||||
*/
|
||||
public static PersistentPropertyTranslator create(@Nullable MongoPersistentEntity<?> targetEntity,
|
||||
Predicate<MongoPersistentProperty> translationFilter) {
|
||||
return targetEntity != null ? new EntityPropertyTranslator(targetEntity, translationFilter)
|
||||
: new PersistentPropertyTranslator();
|
||||
}
|
||||
|
||||
private static class EntityPropertyTranslator extends PersistentPropertyTranslator {
|
||||
|
||||
private final MongoPersistentEntity<?> targetEntity;
|
||||
private final Predicate<MongoPersistentProperty> translationFilter;
|
||||
|
||||
EntityPropertyTranslator(MongoPersistentEntity<?> targetEntity,
|
||||
Predicate<MongoPersistentProperty> translationFilter) {
|
||||
this.targetEntity = targetEntity;
|
||||
this.translationFilter = translationFilter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty translate(MongoPersistentProperty property) {
|
||||
|
||||
if (!translationFilter.test(property)) {
|
||||
return property;
|
||||
}
|
||||
|
||||
MongoPersistentProperty targetProperty = targetEntity.getPersistentProperty(property.getName());
|
||||
return targetProperty != null ? targetProperty : property;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
/*
|
||||
* Copyright 2021-2022 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.core.annotation.AliasFor;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
|
||||
/**
|
||||
* Identifies a domain object to be persisted to a MongoDB Time Series collection.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
@Inherited
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ ElementType.TYPE })
|
||||
@Document
|
||||
public @interface TimeSeries {
|
||||
|
||||
/**
|
||||
* The collection the document representing the entity is supposed to be stored in. If not configured, a default
|
||||
* collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically
|
||||
* calculate the collection based on a per operation basis.
|
||||
*
|
||||
* @return the name of the collection to be used.
|
||||
* @see Document#collection()
|
||||
*/
|
||||
@AliasFor(annotation = Document.class, attribute = "collection")
|
||||
String collection() default "";
|
||||
|
||||
/**
|
||||
* Name of the property which contains the date in each time series document. <br />
|
||||
* Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping
|
||||
* process.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String timeField();
|
||||
|
||||
/**
|
||||
* The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor
|
||||
* {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}. <br />
|
||||
* Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping
|
||||
* process.
|
||||
*
|
||||
* @return empty {@link String} by default.
|
||||
*/
|
||||
String metaField() default "";
|
||||
|
||||
/**
|
||||
* Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized.
|
||||
*
|
||||
* @return {@link Granularity#DEFAULT server default} by default.
|
||||
*/
|
||||
Granularity granularity() default Granularity.DEFAULT;
|
||||
|
||||
/**
|
||||
* Defines the collation to apply when executing a query or creating indexes.
|
||||
*
|
||||
* @return an empty {@link String} by default.
|
||||
* @see Document#collation()
|
||||
*/
|
||||
@AliasFor(annotation = Document.class, attribute = "collation")
|
||||
String collation() default "";
|
||||
|
||||
}
|
||||
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Spliterator;
|
||||
@@ -107,6 +108,16 @@ class UnwrappedMongoPersistentEntity<T> implements MongoPersistentEntity<T> {
|
||||
return delegate.isConstructorArgument(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public InstanceCreatorMetadata<MongoPersistentProperty> getInstanceCreatorMetadata() {
|
||||
return delegate.getInstanceCreatorMetadata();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCreatorArgument(PersistentProperty<?> property) {
|
||||
return delegate.isCreatorArgument(property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isIdProperty(PersistentProperty<?> property) {
|
||||
return delegate.isIdProperty(property);
|
||||
@@ -323,4 +334,9 @@ class UnwrappedMongoPersistentEntity<T> implements MongoPersistentEntity<T> {
|
||||
public boolean isUnwrapped() {
|
||||
return context.getProperty().isUnwrapped();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> getEncryptionKeyIds() {
|
||||
return delegate.getEncryptionKeyIds();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ package org.springframework.data.mongodb.core.mapping;
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
@@ -55,6 +56,12 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return context.getProperty().findAnnotation(Unwrapped.class).prefix() + delegate.getFieldName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasExplicitFieldName() {
|
||||
return delegate.hasExplicitFieldName()
|
||||
|| !ObjectUtils.isEmpty(context.getProperty().findAnnotation(Unwrapped.class).prefix());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getFieldType() {
|
||||
return delegate.getFieldType();
|
||||
@@ -65,11 +72,21 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return delegate.getFieldOrder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean writeNullValues() {
|
||||
return delegate.writeNullValues();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDbReference() {
|
||||
return delegate.isDbReference();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDocumentReference() {
|
||||
return delegate.isDocumentReference();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isExplicitIdProperty() {
|
||||
return delegate.isExplicitIdProperty();
|
||||
@@ -96,6 +113,12 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return delegate.getDBRef();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public DocumentReference getDocumentReference() {
|
||||
return delegate.getDocumentReference();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean usePropertyAccess() {
|
||||
return delegate.usePropertyAccess();
|
||||
@@ -131,6 +154,11 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return delegate.getPersistentEntityTypes();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<? extends TypeInformation<?>> getPersistentEntityTypeInformation() {
|
||||
return delegate.getPersistentEntityTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public Method getGetter() {
|
||||
@@ -247,6 +275,11 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return delegate.isUnwrapped();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> getEncryptionKeyIds() {
|
||||
return delegate.getEncryptionKeyIds();
|
||||
}
|
||||
|
||||
@Override
|
||||
@Nullable
|
||||
public Class<?> getComponentType() {
|
||||
@@ -302,6 +335,11 @@ class UnwrappedMongoPersistentProperty implements MongoPersistentProperty {
|
||||
return delegate.getAssociationTargetType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TypeInformation<?> getAssociationTargetTypeInformation() {
|
||||
return delegate.getAssociationTargetTypeInformation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> PersistentPropertyAccessor<T> getAccessorForOwner(T owner) {
|
||||
return delegate.getAccessorForOwner(owner);
|
||||
|
||||
@@ -15,8 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
import org.springframework.core.GenericTypeResolver;
|
||||
import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
@@ -31,7 +32,7 @@ import org.springframework.data.mongodb.core.query.SerializationUtils;
|
||||
*/
|
||||
public abstract class AbstractMongoEventListener<E> implements ApplicationListener<MongoMappingEvent<?>> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(AbstractMongoEventListener.class);
|
||||
private static final Log LOG = LogFactory.getLog(AbstractMongoEventListener.class);
|
||||
private final Class<?> domainClass;
|
||||
|
||||
/**
|
||||
@@ -104,7 +105,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onBeforeConvert(BeforeConvertEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeConvert({})", SerializationUtils.serializeToJsonSafely(event.getSource()));
|
||||
LOG.debug(String.format("onBeforeConvert(%s)", SerializationUtils.serializeToJsonSafely(event.getSource())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,7 +118,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onBeforeSave(BeforeSaveEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeSave({}, {})", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()));
|
||||
LOG.debug(String.format("onBeforeSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,7 +131,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onAfterSave(AfterSaveEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterSave({}, {})", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument()));
|
||||
LOG.debug(String.format("onAfterSave(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getSource()), SerializationUtils.serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,7 +144,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onAfterLoad(AfterLoadEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterLoad({})", SerializationUtils.serializeToJsonSafely(event.getDocument()));
|
||||
LOG.debug(String.format("onAfterLoad(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,7 +157,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onAfterConvert(AfterConvertEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterConvert({}, {})", SerializationUtils.serializeToJsonSafely(event.getDocument()), SerializationUtils.serializeToJsonSafely(event.getSource()));
|
||||
LOG.debug(String.format("onAfterConvert(%s, %s)", SerializationUtils.serializeToJsonSafely(event.getDocument()), SerializationUtils.serializeToJsonSafely(event.getSource())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,7 +170,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onAfterDelete(AfterDeleteEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onAfterDelete({})", SerializationUtils.serializeToJsonSafely(event.getDocument()));
|
||||
LOG.debug(String.format("onAfterDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,7 +183,7 @@ public abstract class AbstractMongoEventListener<E> implements ApplicationListen
|
||||
public void onBeforeDelete(BeforeDeleteEvent<E> event) {
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("onBeforeDelete({})", SerializationUtils.serializeToJsonSafely(event.getDocument()));
|
||||
LOG.debug(String.format("onBeforeDelete(%s)", SerializationUtils.serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,8 +17,9 @@ package org.springframework.data.mongodb.core.mapping.event;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.context.ApplicationListener;
|
||||
|
||||
/**
|
||||
@@ -31,7 +32,7 @@ import org.springframework.context.ApplicationListener;
|
||||
*/
|
||||
public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(LoggingEventListener.class);
|
||||
private static final Log LOGGER = LogFactory.getLog(LoggingEventListener.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -39,7 +40,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeConvert(BeforeConvertEvent<Object> event) {
|
||||
LOGGER.info("onBeforeConvert: {}", event.getSource());
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onBeforeConvert: %s", event.getSource()));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -48,7 +51,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeSave(BeforeSaveEvent<Object> event) {
|
||||
LOGGER.info("onBeforeSave: {}, {}", event.getSource(), serializeToJsonSafely(event.getDocument()));
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onBeforeSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -57,7 +62,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterSave(AfterSaveEvent<Object> event) {
|
||||
LOGGER.info("onAfterSave: {}, {}", event.getSource(), serializeToJsonSafely(event.getDocument()));
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onAfterSave: %s, %s", event.getSource(), serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -66,7 +73,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterLoad(AfterLoadEvent<Object> event) {
|
||||
LOGGER.info("onAfterLoad: {}", serializeToJsonSafely(event.getDocument()));
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onAfterLoad: %s", serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -75,7 +84,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterConvert(AfterConvertEvent<Object> event) {
|
||||
LOGGER.info("onAfterConvert: {}, {}", serializeToJsonSafely(event.getDocument()), event.getSource());
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onAfterConvert: %s, %s", serializeToJsonSafely(event.getDocument()), event.getSource()));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -84,7 +95,9 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onAfterDelete(AfterDeleteEvent<Object> event) {
|
||||
LOGGER.info("onAfterDelete: {}", serializeToJsonSafely(event.getDocument()));
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onAfterDelete: %s", serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -93,6 +106,8 @@ public class LoggingEventListener extends AbstractMongoEventListener<Object> {
|
||||
*/
|
||||
@Override
|
||||
public void onBeforeDelete(BeforeDeleteEvent<Object> event) {
|
||||
LOGGER.info("onBeforeDelete: {}", serializeToJsonSafely(event.getDocument()));
|
||||
if(LOGGER.isInfoEnabled()) {
|
||||
LOGGER.info(String.format("onBeforeDelete: %s", serializeToJsonSafely(event.getDocument())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,8 +20,9 @@ import java.util.Set;
|
||||
import javax.validation.ConstraintViolationException;
|
||||
import javax.validation.Validator;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -34,7 +35,7 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
public class ValidatingMongoEventListener extends AbstractMongoEventListener<Object> {
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ValidatingMongoEventListener.class);
|
||||
private static final Log LOG = LogFactory.getLog(ValidatingMongoEventListener.class);
|
||||
|
||||
private final Validator validator;
|
||||
|
||||
@@ -57,12 +58,16 @@ public class ValidatingMongoEventListener extends AbstractMongoEventListener<Obj
|
||||
@Override
|
||||
public void onBeforeSave(BeforeSaveEvent<Object> event) {
|
||||
|
||||
LOG.debug("Validating object: {}", event.getSource());
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug(String.format("Validating object: %s", event.getSource()));
|
||||
}
|
||||
Set violations = validator.validate(event.getSource());
|
||||
|
||||
if (!violations.isEmpty()) {
|
||||
|
||||
LOG.info("During object: {} validation violations found: {}", event.getSource(), violations);
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.info(String.format("During object: %s validation violations found: %s", event.getSource(), violations));
|
||||
}
|
||||
throw new ConstraintViolationException(violations);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,9 @@ package org.springframework.data.mongodb.core.mapreduce;
|
||||
*
|
||||
* @author Mark Pollack
|
||||
* @author Oliver Gierke
|
||||
* @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MapReduceCounts {
|
||||
|
||||
public static final MapReduceCounts NONE = new MapReduceCounts(-1, -1, -1);
|
||||
|
||||
@@ -30,7 +30,9 @@ import com.mongodb.client.model.MapReduceAction;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MapReduceOptions {
|
||||
|
||||
private @Nullable String outputCollection;
|
||||
|
||||
@@ -30,7 +30,9 @@ import org.springframework.util.Assert;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @param <T> The class in which the results are mapped onto, accessible via an iterator.
|
||||
* @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MapReduceResults<T> implements Iterable<T> {
|
||||
|
||||
private final List<T> mappedResults;
|
||||
|
||||
@@ -15,6 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.mapreduce;
|
||||
|
||||
/**
|
||||
* @deprecated since 3.4 in favor of {@link org.springframework.data.mongodb.core.aggregation}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class MapReduceTiming {
|
||||
|
||||
private long mapTime, emitLoopTime, totalTime;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user