Compare commits
72 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d855a0b07d | ||
|
|
d809ee0104 | ||
|
|
d138296123 | ||
|
|
e11560dffc | ||
|
|
4fbf01467e | ||
|
|
012d1245b0 | ||
|
|
d8eb0f124a | ||
|
|
bfeb896c70 | ||
|
|
efffc936fa | ||
|
|
563a3fb845 | ||
|
|
50ae6fd045 | ||
|
|
ae0e240334 | ||
|
|
852a461429 | ||
|
|
2cbed2a052 | ||
|
|
95667edec3 | ||
|
|
c1a52de8e5 | ||
|
|
7e94c1bdc3 | ||
|
|
c3259e395c | ||
|
|
3526b6a2d8 | ||
|
|
cb70a97ea8 | ||
|
|
52415bc702 | ||
|
|
43de140842 | ||
|
|
15b000ecce | ||
|
|
e428b9b977 | ||
|
|
6e38610ac1 | ||
|
|
e7af70efca | ||
|
|
39f5f91261 | ||
|
|
c48daa6d56 | ||
|
|
11356cd20f | ||
|
|
7385262c47 | ||
|
|
259938588a | ||
|
|
a1b4e3fc55 | ||
|
|
76479820bc | ||
|
|
c47bbc4a20 | ||
|
|
74791d0bca | ||
|
|
f4d2287011 | ||
|
|
ab6ba194c1 | ||
|
|
595a346705 | ||
|
|
08c5e5a810 | ||
|
|
f987217c3c | ||
|
|
92a22978c2 | ||
|
|
2e2e076b5b | ||
|
|
0c50d97887 | ||
|
|
c10d4b6af0 | ||
|
|
6644ac6875 | ||
|
|
708def0df1 | ||
|
|
889e5d52bb | ||
|
|
8930091b33 | ||
|
|
b1d750efed | ||
|
|
7a19593f02 | ||
|
|
9021445ccd | ||
|
|
db92c37502 | ||
|
|
99e5e2596e | ||
|
|
d0bf0e2e62 | ||
|
|
28efb3afbe | ||
|
|
99eb849c93 | ||
|
|
d33aa682e5 | ||
|
|
f6db089f6f | ||
|
|
13ae5e17bb | ||
|
|
ee203bf22a | ||
|
|
990696ba11 | ||
|
|
4bc2f108fe | ||
|
|
5064ba5b24 | ||
|
|
1179ded140 | ||
|
|
e12700c00b | ||
|
|
0507adab20 | ||
|
|
375ddf8afb | ||
|
|
283cf06dc1 | ||
|
|
10a8456581 | ||
|
|
e751a43cdf | ||
|
|
c987ba5f83 | ||
|
|
950bae0306 |
4
.mvn/wrapper/maven-wrapper.properties
vendored
4
.mvn/wrapper/maven-wrapper.properties
vendored
@@ -1,2 +1,2 @@
|
||||
#Mon Oct 11 14:30:24 CEST 2021
|
||||
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip
|
||||
#Fri Sep 10 15:39:33 CEST 2021
|
||||
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.2/apache-maven-3.8.2-bin.zip
|
||||
|
||||
2
CI.adoc
2
CI.adoc
@@ -1,6 +1,6 @@
|
||||
= Continuous Integration
|
||||
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
= Spring Data contribution guidelines
|
||||
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
|
||||
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
|
||||
|
||||
91
Jenkinsfile
vendored
91
Jenkinsfile
vendored
@@ -3,7 +3,7 @@ pipeline {
|
||||
|
||||
triggers {
|
||||
pollSCM 'H/10 * * * *'
|
||||
upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS)
|
||||
upstream(upstreamProjects: "spring-data-commons/2.5.x", threshold: hudson.model.Result.SUCCESS)
|
||||
}
|
||||
|
||||
options {
|
||||
@@ -14,22 +14,6 @@ pipeline {
|
||||
stages {
|
||||
stage("Docker images") {
|
||||
parallel {
|
||||
stage('Publish JDK 8 + MongoDB 5.0') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-5.0/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk8-with-mongodb-5.0.0", "ci/openjdk8-mongodb-5.0/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 8 + MongoDB 4.0') {
|
||||
when {
|
||||
changeset "ci/openjdk8-mongodb-4.0/**"
|
||||
@@ -62,16 +46,16 @@ pipeline {
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Publish JDK 16 + MongoDB 4.4') {
|
||||
stage('Publish JDK 15 + MongoDB 4.4') {
|
||||
when {
|
||||
changeset "ci/openjdk16-mongodb-4.4/**"
|
||||
changeset "ci/openjdk15-mongodb-4.4/**"
|
||||
}
|
||||
agent { label 'data' }
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
|
||||
steps {
|
||||
script {
|
||||
def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/")
|
||||
def image = docker.build("springci/spring-data-openjdk15-with-mongodb-4.4.4", "ci/openjdk15-mongodb-4.4/")
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
image.push()
|
||||
}
|
||||
@@ -83,9 +67,8 @@ pipeline {
|
||||
|
||||
stage("test: baseline (jdk8)") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
branch '3.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -114,9 +97,8 @@ pipeline {
|
||||
|
||||
stage("Test other configurations") {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
allOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
branch '3.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -169,31 +151,7 @@ pipeline {
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: mongodb 5.0 (jdk8)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 30, unit: 'MINUTES') }
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk8-with-mongodb-5.0.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
|
||||
sh 'sleep 15'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage("test: baseline (jdk16)") {
|
||||
stage("test: baseline (jdk15)") {
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
@@ -204,7 +162,7 @@ pipeline {
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
docker.image('springci/spring-data-openjdk15-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
|
||||
sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
|
||||
sh 'sleep 10'
|
||||
@@ -221,9 +179,8 @@ pipeline {
|
||||
|
||||
stage('Release to artifactory') {
|
||||
when {
|
||||
beforeAgent(true)
|
||||
anyOf {
|
||||
branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
|
||||
branch '3.2.x'
|
||||
not { triggeredBy 'UpstreamCause' }
|
||||
}
|
||||
}
|
||||
@@ -240,7 +197,6 @@ pipeline {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -v'
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,artifactory ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
@@ -254,6 +210,35 @@ pipeline {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Publish documentation') {
|
||||
when {
|
||||
branch '3.2.x'
|
||||
}
|
||||
agent {
|
||||
label 'data'
|
||||
}
|
||||
options { timeout(time: 20, unit: 'MINUTES') }
|
||||
|
||||
environment {
|
||||
ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
|
||||
}
|
||||
|
||||
steps {
|
||||
script {
|
||||
docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
|
||||
docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
|
||||
sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' +
|
||||
'-Dartifactory.server=https://repo.spring.io ' +
|
||||
"-Dartifactory.username=${ARTIFACTORY_USR} " +
|
||||
"-Dartifactory.password=${ARTIFACTORY_PSW} " +
|
||||
"-Dartifactory.distribution-repository=temp-private-local " +
|
||||
'-Dmaven.test.skip=true clean deploy -U -B'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
post {
|
||||
|
||||
123
README.adoc
123
README.adoc
@@ -1,6 +1,6 @@
|
||||
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]
|
||||
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
|
||||
|
||||
The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.
|
||||
|
||||
@@ -8,12 +8,10 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring
|
||||
The Spring Data MongoDB project provides integration with the MongoDB document database.
|
||||
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.
|
||||
|
||||
[[code-of-conduct]]
|
||||
== Code of Conduct
|
||||
|
||||
This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
|
||||
|
||||
[[getting-started]]
|
||||
== Getting Started
|
||||
|
||||
Here is a quick teaser of an application using Spring Data Repositories in Java:
|
||||
@@ -61,7 +59,6 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
|
||||
}
|
||||
----
|
||||
|
||||
[[maven-configuration]]
|
||||
=== Maven configuration
|
||||
|
||||
Add the Maven dependency:
|
||||
@@ -71,25 +68,24 @@ Add the Maven dependency:
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>${version}</version>
|
||||
<version>${version}.RELEASE</version>
|
||||
</dependency>
|
||||
----
|
||||
|
||||
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
|
||||
and declare the appropriate dependency version.
|
||||
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.
|
||||
|
||||
[source,xml]
|
||||
----
|
||||
<dependency>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb</artifactId>
|
||||
<version>${version}-SNAPSHOT</version>
|
||||
<version>${version}.BUILD-SNAPSHOT</version>
|
||||
</dependency>
|
||||
|
||||
<repository>
|
||||
<id>spring-snapshot</id>
|
||||
<id>spring-libs-snapshot</id>
|
||||
<name>Spring Snapshot Repository</name>
|
||||
<url>https://repo.spring.io/snapshot</url>
|
||||
<url>https://repo.spring.io/libs-snapshot</url>
|
||||
</repository>
|
||||
----
|
||||
|
||||
@@ -102,7 +98,7 @@ Some of the changes affect the initial setup configuration as well as compile/ru
|
||||
|
||||
.Changed XML Namespace Elements and Attributes:
|
||||
|===
|
||||
| Element / Attribute | 2.x | 3.x
|
||||
Element / Attribute | 2.x | 3.x
|
||||
|
||||
| `<mongo:mongo-client />`
|
||||
| Used to create a `com.mongodb.MongoClient`
|
||||
@@ -120,7 +116,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead
|
||||
|
||||
.Removed XML Namespace Elements and Attributes:
|
||||
|===
|
||||
| Element / Attribute | Replacement in 3.x | Comment
|
||||
Element / Attribute | Replacement in 3.x | Comment
|
||||
|
||||
| `<mongo:db-factory mongo-ref="..." />`
|
||||
| `<mongo:db-factory mongo-client-ref="..." />`
|
||||
@@ -137,7 +133,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead
|
||||
|
||||
.New XML Namespace Elements and Attributes:
|
||||
|===
|
||||
| Element | Comment
|
||||
Element | Comment
|
||||
|
||||
| `<mongo:db-factory mongo-client-ref="..." />`
|
||||
| Replacement for `<mongo:db-factory mongo-ref="..." />`
|
||||
@@ -157,7 +153,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead
|
||||
|
||||
.Java API changes
|
||||
|===
|
||||
| Type | Comment
|
||||
Type | Comment
|
||||
|
||||
| `MongoClientFactoryBean`
|
||||
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
|
||||
@@ -178,7 +174,7 @@ Uses `MongoClientSettings` instead of `MongoClientOptions`.
|
||||
|
||||
.Removed Java API:
|
||||
|===
|
||||
| 2.x | Replacement in 3.x | Comment
|
||||
2.x | Replacement in 3.x | Comment
|
||||
|
||||
| `MongoClientOptionsFactoryBean`
|
||||
| `MongoClientSettingsFactoryBean`
|
||||
@@ -230,7 +226,6 @@ static class Config extends AbstractMongoClientConfiguration {
|
||||
----
|
||||
====
|
||||
|
||||
[[getting-help]]
|
||||
== Getting Help
|
||||
|
||||
Having trouble with Spring Data? We’d love to help!
|
||||
@@ -244,7 +239,6 @@ If you are just starting out with Spring, try one of the https://spring.io/guide
|
||||
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
|
||||
* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].
|
||||
|
||||
[[reporting-issues]]
|
||||
== Reporting Issues
|
||||
|
||||
Spring Data uses Github as issue tracking system to record bugs and feature requests.
|
||||
@@ -255,86 +249,10 @@ If you want to raise an issue, please follow the recommendations below:
|
||||
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc.
|
||||
* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.
|
||||
|
||||
[[guides]]
|
||||
== Guides
|
||||
|
||||
The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
|
||||
|
||||
* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
|
||||
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
|
||||
|
||||
[[examples]]
|
||||
== Examples
|
||||
|
||||
* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
|
||||
|
||||
[[building-from-source]]
|
||||
== Building from Source
|
||||
|
||||
You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io]
|
||||
and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.
|
||||
|
||||
NOTE: Configuration for Gradle is similar to Maven.
|
||||
|
||||
The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
|
||||
Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
|
||||
to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
|
||||
to build a reactive one.
|
||||
|
||||
However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper]
|
||||
and minimally, JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).
|
||||
|
||||
In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download]
|
||||
and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].
|
||||
|
||||
Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
|
||||
your MongoDB installation directory (e.g. `MONGODB_HOME`).
|
||||
|
||||
To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set]
|
||||
is required.
|
||||
|
||||
To run the MongoDB server enter the following command from a command-line:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
|
||||
...
|
||||
"msg":"Successfully connected to host"
|
||||
----
|
||||
|
||||
Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".
|
||||
|
||||
Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
|
||||
the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).
|
||||
|
||||
You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started.
|
||||
To initialize the replica set, start a mongo client:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ $MONGODB_HOME/bin/mongo
|
||||
MongoDB server version: 5.0.0
|
||||
...
|
||||
----
|
||||
|
||||
Then enter the following command:
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
|
||||
----
|
||||
|
||||
Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
|
||||
In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
$ ulimit -n 32768
|
||||
----
|
||||
|
||||
You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately.
|
||||
|
||||
Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
|
||||
You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
|
||||
You also need JDK 1.8.
|
||||
|
||||
[source,bash]
|
||||
----
|
||||
@@ -343,8 +261,7 @@ Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw
|
||||
|
||||
If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].
|
||||
|
||||
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
|
||||
the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
|
||||
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
|
||||
|
||||
=== Building reference documentation
|
||||
|
||||
@@ -357,7 +274,17 @@ Building the documentation builds also the project without running tests.
|
||||
|
||||
The generated documentation is available from `target/site/reference/html/index.html`.
|
||||
|
||||
[[license]]
|
||||
== Guides
|
||||
|
||||
The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:
|
||||
|
||||
* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
|
||||
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.
|
||||
|
||||
== Examples
|
||||
|
||||
* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.
|
||||
|
||||
== License
|
||||
|
||||
Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
|
||||
|
||||
@@ -4,9 +4,6 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
FROM adoptopenjdk/openjdk16:latest
|
||||
FROM adoptopenjdk/openjdk15:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
@@ -4,9 +4,6 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
|
||||
|
||||
@@ -4,9 +4,6 @@ ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
FROM adoptopenjdk/openjdk8:latest
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN set -eux; \
|
||||
sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
|
||||
sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
|
||||
apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
|
||||
# MongoDB 5.0 release signing key
|
||||
apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
|
||||
# Needed when MongoDB creates a 5.0 folder.
|
||||
echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
|
||||
echo ${TZ} > /etc/timezone;
|
||||
|
||||
RUN apt-get update; \
|
||||
apt-get install -y mongodb-org=5.0.3 mongodb-org-server=5.0.3 mongodb-org-shell=5.0.3 mongodb-org-mongos=5.0.3 mongodb-org-tools=5.0.3; \
|
||||
apt-get clean; \
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
20
pom.xml
20
pom.xml
@@ -5,7 +5,7 @@
|
||||
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M2</version>
|
||||
<version>3.2.7</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<name>Spring Data MongoDB</name>
|
||||
@@ -15,7 +15,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data.build</groupId>
|
||||
<artifactId>spring-data-parent</artifactId>
|
||||
<version>2.7.0-M2</version>
|
||||
<version>2.5.7</version>
|
||||
</parent>
|
||||
|
||||
<modules>
|
||||
@@ -26,8 +26,8 @@
|
||||
<properties>
|
||||
<project.type>multi</project.type>
|
||||
<dist.id>spring-data-mongodb</dist.id>
|
||||
<springdata.commons>2.7.0-M2</springdata.commons>
|
||||
<mongo>4.4.1</mongo>
|
||||
<springdata.commons>2.5.7</springdata.commons>
|
||||
<mongo>4.2.3</mongo>
|
||||
<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
|
||||
<jmh.version>1.19</jmh.version>
|
||||
</properties>
|
||||
@@ -134,18 +134,18 @@
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>spring-libs-milestone</id>
|
||||
<url>https://repo.spring.io/libs-milestone</url>
|
||||
<id>spring-libs-release</id>
|
||||
<url>https://repo.spring.io/libs-release</url>
|
||||
</repository>
|
||||
<repository>
|
||||
<id>sonatype-libs-snapshot</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
<releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<enabled>false</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
<enabled>true</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M2</version>
|
||||
<version>3.2.7</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M2</version>
|
||||
<version>3.2.7</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.data</groupId>
|
||||
<artifactId>spring-data-mongodb-parent</artifactId>
|
||||
<version>3.4.0-M2</version>
|
||||
<version>3.2.7</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -87,13 +87,6 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.google.code.findbugs</groupId>
|
||||
<artifactId>jsr305</artifactId>
|
||||
<version>3.0.2</version>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- reactive -->
|
||||
|
||||
<dependency>
|
||||
@@ -237,6 +230,13 @@
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jul-to-slf4j</artifactId>
|
||||
<version>${slf4j}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
@@ -310,15 +310,6 @@
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- jMolecules -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.jmolecules</groupId>
|
||||
<artifactId>jmolecules-ddd</artifactId>
|
||||
<version>${jmolecules}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
@@ -31,7 +31,7 @@ import org.springframework.util.StringUtils;
|
||||
* expression. The expression will be wrapped within <code>{ ... }</code> if necessary. The actual parsing and parameter
|
||||
* binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via
|
||||
* {@link #toDocument()}.
|
||||
* <br />
|
||||
* <p />
|
||||
*
|
||||
* <pre class="code">
|
||||
* $toUpper : $name -> { '$toUpper' : '$name' }
|
||||
|
||||
@@ -20,8 +20,8 @@ import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Helper class featuring helper methods for working with MongoDb collections.
|
||||
* <br />
|
||||
* <br />
|
||||
* <p/>
|
||||
* <p/>
|
||||
* Mainly intended for internal use within the framework.
|
||||
*
|
||||
* @author Thomas Risberg
|
||||
|
||||
@@ -30,7 +30,7 @@ import com.mongodb.client.MongoDatabase;
|
||||
* Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
|
||||
* {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
|
||||
* {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -43,7 +43,7 @@ public class MongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -56,7 +56,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -71,7 +71,7 @@ public class MongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -85,7 +85,7 @@ public class MongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
|
||||
* {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
|
||||
@@ -18,7 +18,7 @@ package org.springframework.data.mongodb;
|
||||
/**
|
||||
* Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
|
||||
* passed on to the driver.
|
||||
* <br />
|
||||
* <p />
|
||||
* A set of predefined {@link MongoExpression expressions}, including a
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method
|
||||
* like expressions (eg. {@code toUpper(name)}) are available via the
|
||||
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
|
||||
* {@link MongoTransactionManager} binds instances of this class to the thread.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -37,18 +37,18 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
|
||||
/**
|
||||
* Create a new {@link MongoTransactionManager} for bean-style usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
|
||||
* {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
|
||||
* {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
@@ -212,8 +212,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* By default those labels are ignored, nevertheless one might check for
|
||||
* {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
|
||||
* commit. <br />
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre>
|
||||
* int retries = 3;
|
||||
* do {
|
||||
* try {
|
||||
@@ -226,8 +226,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
|
||||
* }
|
||||
* Thread.sleep(500);
|
||||
* } while (--retries > 0);
|
||||
* </pre>
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* @param transactionObject never {@literal null}.
|
||||
* @throws Exception in case of transaction errors.
|
||||
|
||||
@@ -36,7 +36,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
@@ -75,7 +75,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -88,7 +88,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -104,7 +104,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -119,7 +119,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -38,21 +38,21 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
|
||||
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.session.ClientSession;
|
||||
/**
|
||||
* {@link MethodInterceptor} implementation looking up and invoking an alternative target method having
|
||||
* {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself
|
||||
* like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them
|
||||
* if not already proxied.
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.util.Version;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -31,7 +31,7 @@ import com.mongodb.MongoDriverInformation;
|
||||
*/
|
||||
public class SpringDataMongoDB {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(SpringDataMongoDB.class);
|
||||
|
||||
private static final Version FALLBACK_VERSION = new Version(3);
|
||||
private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation
|
||||
@@ -48,7 +48,7 @@ public class SpringDataMongoDB {
|
||||
|
||||
/**
|
||||
* Fetches the "Implementation-Version" manifest attribute from the jar file.
|
||||
* <br />
|
||||
* <p />
|
||||
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
|
||||
* version in all environments. In this case the current Major version is returned as a fallback.
|
||||
*
|
||||
@@ -68,7 +68,7 @@ public class SpringDataMongoDB {
|
||||
try {
|
||||
return Version.parse(versionString);
|
||||
} catch (Exception e) {
|
||||
LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString));
|
||||
LOGGER.debug("Cannot read Spring Data MongoDB version '{}'.", versionString);
|
||||
}
|
||||
|
||||
return FALLBACK_VERSION;
|
||||
|
||||
@@ -172,7 +172,8 @@ public abstract class MongoConfigurationSupport {
|
||||
|
||||
/**
|
||||
* Configures whether to abbreviate field names for domain objects by configuring a
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created.
|
||||
* {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
|
||||
* customization needs, consider overriding {@link #mappingMongoConverter()}.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
|
||||
@@ -22,12 +22,9 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -115,20 +112,6 @@ abstract class MongoParsingUtils {
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// ServerAPI
|
||||
if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) {
|
||||
|
||||
MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
|
||||
serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version"));
|
||||
try {
|
||||
clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject());
|
||||
} catch (Exception exception) {
|
||||
throw new BeanDefinitionValidationException("Non parsable server-api.", exception);
|
||||
}
|
||||
} else {
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi");
|
||||
}
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
@@ -21,8 +21,8 @@ import java.net.UnknownHostException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
@@ -43,8 +43,8 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
* A port is a number without a leading 0 at the end of the address that is proceeded by just a single :.
|
||||
*/
|
||||
private static final String HOST_PORT_SPLIT_PATTERN = "(?<!:):(?=[123456789]\\d*$)";
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address %s '%s'. Check your replica set configuration!";
|
||||
private static final Log LOG = LogFactory.getLog(ServerAddressPropertyEditor.class);
|
||||
private static final String COULD_NOT_PARSE_ADDRESS_MESSAGE = "Could not parse address {} '{}'. Check your replica set configuration!";
|
||||
private static final Logger LOG = LoggerFactory.getLogger(ServerAddressPropertyEditor.class);
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -88,18 +88,14 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
private ServerAddress parseServerAddress(String source) {
|
||||
|
||||
if (!StringUtils.hasText(source)) {
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
String[] hostAndPort = extractHostAddressAndPort(source.trim());
|
||||
|
||||
if (hostAndPort.length > 2) {
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source));
|
||||
}
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source);
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -109,13 +105,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport {
|
||||
|
||||
return port == null ? new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port);
|
||||
} catch (UnknownHostException e) {
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]));
|
||||
}
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]);
|
||||
} catch (NumberFormatException e) {
|
||||
if(LOG.isWarnEnabled()) {
|
||||
LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]));
|
||||
}
|
||||
LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]);
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -242,13 +242,13 @@ public class ChangeStreamOptions {
|
||||
|
||||
/**
|
||||
* Set the filter to apply.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Fields on aggregation expression root level are prefixed to map to fields contained in
|
||||
* {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
|
||||
* {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
|
||||
* as given, during the mapping procedure. You may want to have a look at the
|
||||
* <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are
|
||||
* mapped to domain type fields.
|
||||
*
|
||||
|
||||
@@ -17,11 +17,8 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -45,7 +42,6 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -58,19 +54,17 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -84,7 +78,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -94,21 +88,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -119,7 +99,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,7 +110,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -141,7 +121,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,7 +132,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -272,20 +252,7 @@ public class CollectionOptions {
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -336,16 +303,6 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -428,7 +385,7 @@ public class CollectionOptions {
|
||||
/**
|
||||
* Get the {@code validationAction} to perform.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @return @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationAction> getValidationAction() {
|
||||
return Optional.ofNullable(validationAction);
|
||||
@@ -441,89 +398,4 @@ public class CollectionOptions {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one,
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
|
||||
* Encryption algorithms supported by MongoDB Client Side Field Level Encryption.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public final class EncryptionAlgorithms {
|
||||
|
||||
public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic";
|
||||
public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random";
|
||||
|
||||
}
|
||||
@@ -23,34 +23,25 @@ import java.util.Optional;
|
||||
import org.bson.Document;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.mapping.IdentifierAccessor;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.TimeSeries;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.EntityProjectionIntrospector;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
@@ -68,19 +59,8 @@ class EntityOperations {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
private final EntityProjectionIntrospector introspector;
|
||||
|
||||
EntityOperations(MongoConverter converter) {
|
||||
this(converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory());
|
||||
}
|
||||
|
||||
EntityOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context,
|
||||
CustomConversions conversions, ProjectionFactory projectionFactory) {
|
||||
EntityOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
this.context = context;
|
||||
this.introspector = EntityProjectionIntrospector.create(projectionFactory,
|
||||
EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy()
|
||||
.and(((target, underlyingType) -> !conversions.isSimpleType(target))),
|
||||
context);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -245,20 +225,6 @@ class EntityOperations {
|
||||
return UntypedOperations.instance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned
|
||||
* type is a projection and what property paths are participating in the projection.
|
||||
*
|
||||
* @param resultType the type to project on. Must not be {@literal null}.
|
||||
* @param entityType the source domain type. Must not be {@literal null}.
|
||||
* @return the introspection result.
|
||||
* @since 3.4
|
||||
* @see EntityProjectionIntrospector#introspect(Class, Class)
|
||||
*/
|
||||
public <M, D> EntityProjection<M, D> introspectProjection(Class<M> resultType, Class<D> entityType) {
|
||||
return introspector.introspect(resultType, entityType);
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of information about an entity.
|
||||
*
|
||||
@@ -812,24 +778,6 @@ class EntityOperations {
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
|
||||
/**
|
||||
* Derive the applicable {@link CollectionOptions} for the given type.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
CollectionOptions getCollectionOptions();
|
||||
|
||||
/**
|
||||
* Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially
|
||||
* annotated field names.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -869,16 +817,6 @@ class EntityOperations {
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
return CollectionOptions.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) {
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -916,58 +854,6 @@ class EntityOperations {
|
||||
|
||||
return Optional.ofNullable(entity.getCollation());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
|
||||
CollectionOptions collectionOptions = CollectionOptions.empty();
|
||||
if (entity.hasCollation()) {
|
||||
collectionOptions = collectionOptions.collation(entity.getCollation());
|
||||
}
|
||||
|
||||
if (entity.isAnnotationPresent(TimeSeries.class)) {
|
||||
|
||||
TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class);
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.timeField()) == null) {
|
||||
throw new MappingException(String.format("Time series field '%s' does not exist in type %s",
|
||||
timeSeries.timeField(), entity.getName()));
|
||||
}
|
||||
|
||||
TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField());
|
||||
if (StringUtils.hasText(timeSeries.metaField())) {
|
||||
|
||||
if (entity.getPersistentProperty(timeSeries.metaField()) == null) {
|
||||
throw new MappingException(
|
||||
String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName()));
|
||||
}
|
||||
|
||||
options = options.metaField(timeSeries.metaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(timeSeries.granularity())) {
|
||||
options = options.granularity(timeSeries.granularity());
|
||||
}
|
||||
collectionOptions = collectionOptions.timeSeries(options);
|
||||
}
|
||||
|
||||
return collectionOptions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) {
|
||||
|
||||
TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField()));
|
||||
|
||||
if (StringUtils.hasText(source.getMetaField())) {
|
||||
target = target.metaField(mappedNameOrDefault(source.getMetaField()));
|
||||
}
|
||||
return target.granularity(source.getGranularity());
|
||||
}
|
||||
|
||||
private String mappedNameOrDefault(String name) {
|
||||
MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
|
||||
return persistentProperty != null ? persistentProperty.getFieldName() : name;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -125,7 +125,7 @@ public interface ExecutableFindOperation {
|
||||
|
||||
/**
|
||||
* Get the number of matching elements.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
|
||||
* execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard,
|
||||
* session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
|
||||
@@ -89,7 +89,7 @@ public interface ExecutableUpdateOperation {
|
||||
|
||||
/**
|
||||
* Trigger
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* execution by calling one of the terminating methods.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -17,7 +17,7 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
/**
|
||||
* Options for
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>.
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>.
|
||||
* <br />
|
||||
* Defaults to
|
||||
* <dl>
|
||||
|
||||
@@ -115,10 +115,6 @@ abstract class IndexConverters {
|
||||
ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class)));
|
||||
}
|
||||
|
||||
if (indexOptions.containsKey("wildcardProjection")) {
|
||||
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
|
||||
}
|
||||
|
||||
return ops;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -20,20 +20,13 @@ import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
@@ -41,12 +34,10 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
@@ -61,7 +52,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Predicate<JsonSchemaPropertyContext> filter;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
@@ -71,24 +61,10 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
this(converter, (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter.getMappingContext(),
|
||||
(property) -> true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter,
|
||||
MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
Predicate<JsonSchemaPropertyContext> filter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = mappingContext;
|
||||
this.filter = filter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter) {
|
||||
return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter);
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -101,29 +77,11 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
{
|
||||
Encrypted encrypted = entity.findAnnotation(Encrypted.class);
|
||||
if (encrypted != null) {
|
||||
|
||||
Document encryptionMetadata = new Document();
|
||||
|
||||
Collection<Object> encryptionKeyIds = entity.getEncryptionKeyIds();
|
||||
if (!CollectionUtils.isEmpty(encryptionKeyIds)) {
|
||||
encryptionMetadata.append("keyId", encryptionKeyIds);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
encryptionMetadata.append("algorithm", encrypted.algorithm());
|
||||
}
|
||||
|
||||
schemaBuilder.encryptionMetadata(encryptionMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -135,11 +93,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (!filter.test(new PropertyContext(
|
||||
currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")), nested))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
@@ -161,88 +114,21 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (!isCollection(property) && property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
JsonSchemaProperty schemaProperty;
|
||||
if (isCollection(property)) {
|
||||
schemaProperty = createArraySchemaProperty(fieldName, property, required);
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
schemaProperty = createEnumSchemaProperty(fieldName, targetType, required);
|
||||
} else {
|
||||
schemaProperty = createSchemaProperty(fieldName, targetType, required);
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return applyEncryptionDataIfNecessary(property, schemaProperty);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property,
|
||||
boolean required) {
|
||||
|
||||
ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName);
|
||||
|
||||
if (isSpecificType(property)) {
|
||||
schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(schemaProperty, required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property,
|
||||
ArrayJsonSchemaProperty schemaProperty) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getPersistentEntity(property.getTypeInformation().getRequiredComponentType());
|
||||
|
||||
if (persistentEntity != null) {
|
||||
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity);
|
||||
|
||||
if (nestedProperties.isEmpty()) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
return schemaProperty
|
||||
.items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0])));
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignable(Enum.class, property.getActualType())) {
|
||||
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) property.getActualType());
|
||||
|
||||
return schemaProperty
|
||||
.items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues));
|
||||
}
|
||||
|
||||
return schemaProperty.items(JsonSchemaObject.of(property.getActualType()));
|
||||
}
|
||||
|
||||
private boolean isSpecificType(MongoPersistentProperty property) {
|
||||
return !ClassTypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
|
||||
}
|
||||
|
||||
private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property,
|
||||
JsonSchemaProperty schemaProperty) {
|
||||
|
||||
Encrypted encrypted = property.findAnnotation(Encrypted.class);
|
||||
if (encrypted == null) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty);
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
enc = enc.algorithm(encrypted.algorithm());
|
||||
}
|
||||
if (!ObjectUtils.isEmpty(encrypted.keyId())) {
|
||||
enc = enc.keys(property.getEncryptionKeyIds());
|
||||
}
|
||||
return enc;
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -256,12 +142,15 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) targetType);
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
targetType = computeTargetType(targetType, possibleValues);
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
@@ -272,20 +161,14 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues);
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
return schemaObject;
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
@@ -316,53 +199,12 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
private static Class<?> computeTargetType(Class<?> fallback, List<Object> possibleValues) {
|
||||
return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass();
|
||||
}
|
||||
|
||||
private <E extends Enum<E>> List<Object> getPossibleEnumValues(Class<E> targetType) {
|
||||
|
||||
EnumSet<E> enumSet = EnumSet.allOf(targetType);
|
||||
List<Object> possibleValues = new ArrayList<>(enumSet.size());
|
||||
|
||||
for (Object enumValue : enumSet) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
return possibleValues;
|
||||
}
|
||||
|
||||
private static boolean isCollection(MongoPersistentProperty property) {
|
||||
return property.isCollectionLike() && !property.getType().equals(byte[].class);
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
return required ? JsonSchemaProperty.required(property) : property;
|
||||
}
|
||||
|
||||
class PropertyContext implements JsonSchemaPropertyContext {
|
||||
|
||||
private final String path;
|
||||
private final MongoPersistentProperty property;
|
||||
|
||||
public PropertyContext(String path, MongoPersistentProperty property) {
|
||||
this.path = path;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property) {
|
||||
return (MongoPersistentEntity<T>) mappingContext.getPersistentEntity(property);
|
||||
}
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +36,6 @@ import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
@@ -114,7 +113,6 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
private @Nullable ServerApi serverApi;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
@@ -397,15 +395,6 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
this.autoEncryptionSettings = autoEncryptionSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param serverApi can be {@literal null}.
|
||||
* @see MongoClientSettings.Builder#serverApi(ServerApi)
|
||||
* @since 3.3
|
||||
*/
|
||||
public void setServerApi(@Nullable ServerApi serverApi) {
|
||||
this.serverApi = serverApi;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MongoClientSettings.class;
|
||||
@@ -487,11 +476,9 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
if (retryWrites != null) {
|
||||
builder = builder.retryWrites(retryWrites);
|
||||
}
|
||||
|
||||
if (uUidRepresentation != null) {
|
||||
builder = builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
if (serverApi != null) {
|
||||
builder = builder.serverApi(serverApi);
|
||||
builder.uuidRepresentation(uUidRepresentation);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
|
||||
@@ -33,7 +33,7 @@ import com.mongodb.client.MongoDatabase;
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -20,7 +20,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -15,23 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -40,7 +24,6 @@ import org.springframework.util.Assert;
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
@@ -62,8 +45,7 @@ import org.springframework.util.Assert;
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
|
||||
* {@link Encrypted} properties will contain {@literal encrypt} information.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -78,88 +60,6 @@ public interface MongoJsonSchemaCreator {
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Filter matching {@link JsonSchemaProperty properties}.
|
||||
*
|
||||
* @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter);
|
||||
|
||||
/**
|
||||
* The context in which a specific {@link #getProperty()} is encountered during schema creation.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
interface JsonSchemaPropertyContext {
|
||||
|
||||
/**
|
||||
* The path to a given field/property in dot notation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getPath();
|
||||
|
||||
/**
|
||||
* The current property.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoPersistentProperty getProperty();
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoPersistentEntity} for a given property.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param <T>
|
||||
* @return {@literal null} if the property is not an entity. It is nevertheless recommend to check
|
||||
* {@link PersistentProperty#isEntity()} first.
|
||||
*/
|
||||
@Nullable
|
||||
<T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones.
|
||||
*
|
||||
* @return new instance of {@link Predicate}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static Predicate<JsonSchemaPropertyContext> encryptedOnly() {
|
||||
|
||||
return new Predicate<JsonSchemaPropertyContext>() {
|
||||
|
||||
// cycle guard
|
||||
private final Set<MongoPersistentProperty> seen = new HashSet<>();
|
||||
|
||||
@Override
|
||||
public boolean test(JsonSchemaPropertyContext context) {
|
||||
return extracted(context.getProperty(), context);
|
||||
}
|
||||
|
||||
private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) {
|
||||
if (property.isAnnotationPresent(Encrypted.class)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!property.isEntity() || seen.contains(property)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
seen.add(property);
|
||||
|
||||
for (MongoPersistentProperty nested : context.resolveEntity(property)) {
|
||||
if (extracted(nested, context)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
|
||||
* {@link MongoConverter}.
|
||||
@@ -172,41 +72,4 @@ public interface MongoJsonSchemaCreator {
|
||||
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
|
||||
return new MappingMongoJsonSchemaCreator(mongoConverter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential
|
||||
* {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static MongoJsonSchemaCreator create(MappingContext mappingContext) {
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
return create(converter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We
|
||||
* recommend to use {@link #create(MappingContext)}.
|
||||
*
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
static MongoJsonSchemaCreator create() {
|
||||
|
||||
MongoMappingContext mappingContext = new MongoMappingContext();
|
||||
mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
|
||||
mappingContext.afterPropertiesSet();
|
||||
|
||||
MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
|
||||
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
|
||||
converter.afterPropertiesSet();
|
||||
|
||||
return create(converter);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
|
||||
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
|
||||
* proxy).
|
||||
* <br />
|
||||
* <p/>
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -125,7 +125,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes a {@link DbCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -138,7 +138,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -151,7 +151,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -176,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p/>
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
|
||||
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
|
||||
*
|
||||
@@ -212,7 +212,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -300,7 +300,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -310,7 +310,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -320,7 +320,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -330,7 +330,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -339,7 +339,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -403,10 +403,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -417,10 +417,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -539,11 +539,11 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
|
||||
* of the inputCollection is derived from the inputType of the aggregation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -557,10 +557,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -576,10 +576,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -702,10 +702,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -720,10 +720,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -768,10 +768,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -784,10 +784,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -881,7 +881,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -897,7 +897,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -914,7 +914,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -934,7 +934,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -957,7 +957,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* The collection name is derived from the {@literal replacement} type. <br />
|
||||
@@ -977,7 +977,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document.<br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
@@ -997,7 +997,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1018,7 +1018,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1041,7 +1041,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1066,7 +1066,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1094,7 +1094,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1120,9 +1120,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
* database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1137,10 +1137,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type. The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1160,7 +1160,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -1182,7 +1182,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -1199,7 +1199,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1215,7 +1215,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1232,7 +1232,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -1249,17 +1249,17 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1270,12 +1270,12 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1315,16 +1315,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1336,15 +1336,16 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* <br />
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.ServerApi.Builder;
|
||||
import com.mongodb.ServerApiVersion;
|
||||
|
||||
/**
|
||||
* {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoServerApiFactoryBean implements FactoryBean<ServerApi> {
|
||||
|
||||
private String version;
|
||||
private @Nullable Boolean deprecationErrors;
|
||||
private @Nullable Boolean strict;
|
||||
|
||||
/**
|
||||
* @param version the version string either as the enum name or the server version value.
|
||||
* @see ServerApiVersion
|
||||
*/
|
||||
public void setVersion(String version) {
|
||||
this.version = version;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param deprecationErrors
|
||||
* @see ServerApi.Builder#deprecationErrors(boolean)
|
||||
*/
|
||||
public void setDeprecationErrors(@Nullable Boolean deprecationErrors) {
|
||||
this.deprecationErrors = deprecationErrors;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param strict
|
||||
* @see ServerApi.Builder#strict(boolean)
|
||||
*/
|
||||
public void setStrict(@Nullable Boolean strict) {
|
||||
this.strict = strict;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public ServerApi getObject() throws Exception {
|
||||
|
||||
Builder builder = ServerApi.builder().version(version());
|
||||
|
||||
if (deprecationErrors != null) {
|
||||
builder = builder.deprecationErrors(deprecationErrors);
|
||||
}
|
||||
if (strict != null) {
|
||||
builder = builder.strict(strict);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return ServerApi.class;
|
||||
}
|
||||
|
||||
private ServerApiVersion version() {
|
||||
try {
|
||||
// lookup by name eg. 'V1'
|
||||
return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// or just the version number, eg. just '1'
|
||||
return ServerApiVersion.findByValue(version);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -24,11 +24,10 @@ import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
@@ -47,7 +46,6 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
@@ -99,12 +97,12 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.jca.cci.core.ConnectionCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
@@ -163,7 +161,7 @@ import com.mongodb.client.result.UpdateResult;
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
|
||||
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
@@ -173,6 +171,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final JsonSchemaMapper schemaMapper;
|
||||
private final SpelAwareProxyProjectionFactory projectionFactory;
|
||||
private final EntityOperations operations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
private final QueryOperations queryOperations;
|
||||
@@ -224,7 +223,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.queryMapper = new QueryMapper(this.mongoConverter);
|
||||
this.updateMapper = new UpdateMapper(this.mongoConverter);
|
||||
this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter);
|
||||
this.operations = new EntityOperations(this.mongoConverter);
|
||||
this.projectionFactory = new SpelAwareProxyProjectionFactory();
|
||||
this.operations = new EntityOperations(this.mongoConverter.getMappingContext());
|
||||
this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext());
|
||||
this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations,
|
||||
mongoDbFactory);
|
||||
@@ -262,6 +262,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.queryMapper = that.queryMapper;
|
||||
this.updateMapper = that.updateMapper;
|
||||
this.schemaMapper = that.schemaMapper;
|
||||
this.projectionFactory = that.projectionFactory;
|
||||
this.mappingContext = that.mappingContext;
|
||||
this.operations = that.operations;
|
||||
this.propertyOperations = that.propertyOperations;
|
||||
@@ -327,12 +328,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
resourceLoader = applicationContext;
|
||||
|
||||
projectionFactory.setBeanFactory(applicationContext);
|
||||
projectionFactory.setBeanClassLoader(applicationContext.getClassLoader());
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
@@ -410,17 +414,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext.getPersistentEntity(entityType);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
EntityProjection<T, ?> projection = operations.introspectProjection(returnType,
|
||||
entityType);
|
||||
|
||||
Document mappedQuery = queryContext.getMappedQuery(persistentEntity);
|
||||
Document mappedFields = queryContext.getMappedFields(persistentEntity, projection);
|
||||
Document mappedFields = queryContext.getMappedFields(persistentEntity, returnType, projectionFactory);
|
||||
|
||||
FindIterable<Document> cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection,
|
||||
col -> col.find(mappedQuery, Document.class).projection(mappedFields));
|
||||
|
||||
return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator,
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collectionName));
|
||||
new ProjectingReadCallback<>(mongoConverter, entityType, returnType, collectionName));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -503,8 +505,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Document fieldsObject = query.getFieldsObject();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing query: %s sort: %s fields: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObject), sortObject, fieldsObject, collectionName));
|
||||
LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject),
|
||||
sortObject, fieldsObject, collectionName);
|
||||
}
|
||||
|
||||
this.executeQueryInternal(new FindCallback(queryObject, fieldsObject, null),
|
||||
@@ -594,7 +596,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> MongoCollection<Document> createCollection(Class<T> entityClass) {
|
||||
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
|
||||
return createCollection(entityClass, CollectionOptions.empty());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -696,8 +698,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
execute(collectionName, (CollectionCallback<Void>) collection -> {
|
||||
collection.drop();
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Dropped collection [%s]",
|
||||
collection.getNamespace() != null ? collection.getNamespace().getCollectionName() : collectionName));
|
||||
LOGGER.debug("Dropped collection [{}]",
|
||||
collection.getNamespace() != null ? collection.getNamespace().getCollectionName() : collectionName);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
@@ -899,8 +901,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
MongoIterable<?> result = execute(collectionName, (collection) -> {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), field, collectionName));
|
||||
LOGGER.debug("Executing findDistinct using query {} for field: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), field, collectionName);
|
||||
}
|
||||
|
||||
QueryCursorPreparer preparer = new QueryCursorPreparer(query, entityClass);
|
||||
@@ -960,11 +962,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
|
||||
AggregationResults<Document> results = aggregate($geoNear, collection, Document.class);
|
||||
EntityProjection<T, ?> projection = operations.introspectProjection(returnType,
|
||||
domainType);
|
||||
|
||||
DocumentCallback<GeoResult<T>> callback = new GeoNearResultDocumentCallback<>(distanceField,
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric());
|
||||
new ProjectingReadCallback<>(mongoConverter, domainType, returnType, collection), near.getMetric());
|
||||
|
||||
List<GeoResult<T>> result = new ArrayList<>();
|
||||
|
||||
@@ -972,7 +972,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
for (Document element : results) {
|
||||
|
||||
GeoResult<T> geoResult = callback.doWith(element);
|
||||
aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue()));
|
||||
aggregate = aggregate.add(new BigDecimal(geoResult.getDistance().getValue()));
|
||||
result.add(geoResult);
|
||||
}
|
||||
|
||||
@@ -1048,10 +1048,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
|
||||
EntityProjection<T, S> projection = operations.introspectProjection(resultType,
|
||||
entityType);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, projection);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory);
|
||||
Document mappedSort = queryContext.getMappedSort(entity);
|
||||
|
||||
replacement = maybeCallBeforeConvert(replacement, collectionName);
|
||||
@@ -1061,8 +1059,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
maybeCallBeforeSave(replacement, mappedReplacement, collectionName);
|
||||
|
||||
T saved = doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort,
|
||||
queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options,
|
||||
projection);
|
||||
queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, resultType);
|
||||
|
||||
if (saved != null) {
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName));
|
||||
@@ -1127,8 +1124,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
protected long doCount(String collectionName, Document filter, CountOptions options) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER
|
||||
.debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName));
|
||||
LOGGER.debug("Executing count: {} in collection: {}", serializeToJsonSafely(filter), collectionName);
|
||||
}
|
||||
|
||||
return execute(collectionName,
|
||||
@@ -1387,6 +1383,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return source.isVersionedEntity() //
|
||||
? doSaveVersioned(source, collectionName) //
|
||||
: (T) doSave(collectionName, objectToSave, this.mongoConverter);
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -1455,8 +1452,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
protected Object insertDocument(String collectionName, Document document, Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Inserting Document containing fields: %s in collection: %s", document.keySet(),
|
||||
collectionName));
|
||||
LOGGER.debug("Inserting Document containing fields: {} in collection: {}", document.keySet(), collectionName);
|
||||
}
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
@@ -1481,7 +1477,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Inserting list of Documents containing %s items", documents.size()));
|
||||
LOGGER.debug("Inserting list of Documents containing {} items", documents.size());
|
||||
}
|
||||
|
||||
execute(collectionName, collection -> {
|
||||
@@ -1505,7 +1501,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
protected Object saveDocument(String collectionName, Document dbDoc, Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Saving Document containing fields: %s", dbDoc.keySet()));
|
||||
LOGGER.debug("Saving Document containing fields: {}", dbDoc.keySet());
|
||||
}
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
@@ -1610,8 +1606,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
if (query.isSorted() && LOGGER.isWarnEnabled()) {
|
||||
|
||||
LOGGER.warn(String.format("%s does not support sort ('%s'). Please use findAndModify() instead.",
|
||||
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject())));
|
||||
LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.",
|
||||
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject()));
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
@@ -1633,8 +1629,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName));
|
||||
LOGGER.debug("Calling update using query: {} and update: {} in collection: {}",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName);
|
||||
}
|
||||
|
||||
collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection;
|
||||
@@ -1651,8 +1647,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return execute(collectionName, collection -> {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s",
|
||||
serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName));
|
||||
LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", serializeToJsonSafely(queryObj),
|
||||
serializeToJsonSafely(updateObj), collectionName);
|
||||
}
|
||||
|
||||
collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection;
|
||||
@@ -1742,8 +1738,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Document removeQuery = queryObject;
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery),
|
||||
collectionName));
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.",
|
||||
new Object[] { serializeToJsonSafely(removeQuery), collectionName });
|
||||
}
|
||||
|
||||
if (query.getLimit() > 0 || query.getSkip() > 0) {
|
||||
@@ -1957,13 +1953,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Document commandObject = new Document("group", document);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing Group with Document [%s]", serializeToJsonSafely(commandObject)));
|
||||
LOGGER.debug("Executing Group with Document [{}]", serializeToJsonSafely(commandObject));
|
||||
}
|
||||
|
||||
Document commandResult = executeCommand(commandObject, this.readPreference);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Group command result = [%s]", commandResult));
|
||||
LOGGER.debug("Group command result = [{}]", commandResult);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -2135,7 +2131,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Document command = aggregationUtil.createCommand(collectionName, aggregation, context);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command)));
|
||||
LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command));
|
||||
}
|
||||
|
||||
Document commandResult = executeCommand(command);
|
||||
@@ -2146,8 +2142,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
List<Document> pipeline = aggregationUtil.createPipeline(aggregation, context);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
String.format("Executing aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName));
|
||||
LOGGER.debug("Executing aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
|
||||
}
|
||||
|
||||
return execute(collectionName, collection -> {
|
||||
@@ -2213,8 +2208,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
List<Document> pipeline = aggregationDefinition.getAggregationPipeline();
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName));
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
|
||||
}
|
||||
|
||||
ReadDocumentCallback<O> readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName);
|
||||
@@ -2440,28 +2434,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
co.validationOptions(options);
|
||||
}
|
||||
|
||||
if (collectionOptions.containsKey("timeseries")) {
|
||||
|
||||
Document timeSeries = collectionOptions.get("timeseries", Document.class);
|
||||
com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(
|
||||
timeSeries.getString("timeField"));
|
||||
if (timeSeries.containsKey("metaField")) {
|
||||
options.metaField(timeSeries.getString("metaField"));
|
||||
}
|
||||
if (timeSeries.containsKey("granularity")) {
|
||||
options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase()));
|
||||
}
|
||||
co.timeSeriesOptions(options);
|
||||
}
|
||||
|
||||
db.createCollection(collectionName, co);
|
||||
|
||||
MongoCollection<Document> coll = db.getCollection(collectionName, Document.class);
|
||||
|
||||
// TODO: Emit a collection created event
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Created collection [%s]",
|
||||
coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName));
|
||||
LOGGER.debug("Created collection [{}]",
|
||||
coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName);
|
||||
}
|
||||
return coll;
|
||||
});
|
||||
@@ -2500,13 +2480,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity,
|
||||
EntityProjection.nonProjecting(entityClass));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), mappedFields, entityClass, collectionName));
|
||||
LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query),
|
||||
mappedFields, entityClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, preparer),
|
||||
@@ -2553,13 +2532,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity,
|
||||
EntityProjection.nonProjecting(entityClass));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName));
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null),
|
||||
@@ -2576,23 +2554,20 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Class<T> targetClass, CursorPreparer preparer) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
|
||||
EntityProjection<T, S> projection = operations.introspectProjection(targetClass,
|
||||
sourceClass);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, projection);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName));
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields, null), preparer,
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
|
||||
new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Convert given {@link CollectionOptions} to a document and take the domain type information into account when
|
||||
* creating a mapped schema for validation. <br />
|
||||
@@ -2613,19 +2588,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() //
|
||||
.ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType))));
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions)
|
||||
.ifPresent(it -> {
|
||||
|
||||
Document timeseries = new Document("timeField", it.getTimeField());
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
timeseries.append("metaField", it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
timeseries.append("granularity", it.getGranularity().name().toLowerCase());
|
||||
}
|
||||
doc.put("timeseries", timeseries);
|
||||
});
|
||||
}
|
||||
|
||||
return doc;
|
||||
@@ -2673,7 +2635,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
|
||||
* The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query document is specified as a standard Document and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
@@ -2688,8 +2650,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
EntityReader<? super T, Bson> readerToUse = this.mongoConverter;
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName));
|
||||
LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(query), fields, sort, entityClass, collectionName);
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
@@ -2719,10 +2681,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
: updateContext.getMappedUpdate(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format(
|
||||
"findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s in collection: %s",
|
||||
LOGGER.debug(
|
||||
"findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate),
|
||||
collectionName));
|
||||
collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(
|
||||
@@ -2751,44 +2713,16 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class<?> entityType,
|
||||
Document replacement, FindAndReplaceOptions options, Class<T> resultType) {
|
||||
|
||||
EntityProjection<T, ?> projection = operations.introspectProjection(resultType,
|
||||
entityType);
|
||||
|
||||
return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, collation, entityType, replacement,
|
||||
options, projection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Customize this part for findAndReplace.
|
||||
*
|
||||
* @param collectionName The name of the collection to perform the operation in.
|
||||
* @param mappedQuery the query to look up documents.
|
||||
* @param mappedFields the fields to project the result to.
|
||||
* @param mappedSort the sort to be applied when executing the query.
|
||||
* @param collation collation settings for the query. Can be {@literal null}.
|
||||
* @param entityType the source domain type.
|
||||
* @param replacement the replacement {@link Document}.
|
||||
* @param options applicable options.
|
||||
* @param projection the projection descriptor.
|
||||
* @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is
|
||||
* {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}.
|
||||
* @since 3.4
|
||||
*/
|
||||
@Nullable
|
||||
private <T> T doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
|
||||
Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class<?> entityType,
|
||||
Document replacement, FindAndReplaceOptions options, EntityProjection<T, ?> projection) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format(
|
||||
"findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + "in collection: %s",
|
||||
LOGGER.debug(
|
||||
"findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} " + "in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), serializeToJsonSafely(mappedSort),
|
||||
entityType, serializeToJsonSafely(replacement), collectionName));
|
||||
entityType, serializeToJsonSafely(replacement), collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(
|
||||
new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options),
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
|
||||
new ProjectingReadCallback<>(mongoConverter, entityType, resultType, collectionName), collectionName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2817,24 +2751,25 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callbacks to do queries against the datastore that requires reading a single object from a
|
||||
* collection of objects. It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link Document}.</li>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link Document}.</li>
|
||||
* <li>Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.</li>
|
||||
* <ol>
|
||||
*
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link Document} with
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private <T> T executeFindOneInternal(CollectionCallback<Document> collectionCallback,
|
||||
DocumentCallback<T> documentCallback, String collectionName) {
|
||||
DocumentCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName));
|
||||
return document != null ? documentCallback.doWith(document) : null;
|
||||
T result = objectCallback
|
||||
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)));
|
||||
return result;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
@@ -2844,7 +2779,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callback to do queries against the datastore that requires reading a collection of objects.
|
||||
* It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
|
||||
* {@link CursorPreparer} is {@literal null}</li>
|
||||
* <li>Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
|
||||
@@ -2854,27 +2789,36 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link FindIterable} with
|
||||
* @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallback<T> documentCallback, String collectionName) {
|
||||
CursorPreparer preparer, DocumentCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
|
||||
List<T> result = new ArrayList<>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
Document object = cursor.next();
|
||||
result.add(documentCallback.doWith(object));
|
||||
result.add(objectCallback.doWith(object));
|
||||
}
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -2884,12 +2828,23 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private void executeQueryInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) {
|
||||
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
try {
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
}
|
||||
} finally {
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -2975,10 +2930,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
|
||||
LOGGER.debug(String.format("findOne using query: %s fields: %s in db.collection: %s",
|
||||
serializeToJsonSafely(query),
|
||||
LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query),
|
||||
serializeToJsonSafely(fields.orElseGet(Document::new)),
|
||||
collection.getNamespace() != null ? collection.getNamespace().getFullName() : "n/a"));
|
||||
collection.getNamespace() != null ? collection.getNamespace().getFullName() : "n/a");
|
||||
}
|
||||
|
||||
if (fields.isPresent()) {
|
||||
@@ -3189,7 +3143,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
interface DocumentCallback<T> {
|
||||
|
||||
T doWith(Document object);
|
||||
@Nullable
|
||||
T doWith(@Nullable Document object);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -3213,19 +3168,22 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
public T doWith(Document document) {
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
T entity = reader.read(type, document);
|
||||
T source = null;
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
if (document != null) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
source = reader.read(type, document);
|
||||
}
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
entity = maybeCallAfterConvert(entity, document, collectionName);
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
source = maybeCallAfterConvert(source, document, collectionName);
|
||||
}
|
||||
|
||||
return entity;
|
||||
return source;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3239,15 +3197,17 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
*/
|
||||
private class ProjectingReadCallback<S, T> implements DocumentCallback<T> {
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
private final EntityProjection<T, S> projection;
|
||||
private final EntityReader<Object, Bson> reader;
|
||||
private final Class<S> entityType;
|
||||
private final Class<T> targetType;
|
||||
private final String collectionName;
|
||||
|
||||
ProjectingReadCallback(MongoConverter mongoConverter, EntityProjection<T, S> projection,
|
||||
ProjectingReadCallback(EntityReader<Object, Bson> reader, Class<S> entityType, Class<T> targetType,
|
||||
String collectionName) {
|
||||
|
||||
this.mongoConverter = mongoConverter;
|
||||
this.projection = projection;
|
||||
this.reader = reader;
|
||||
this.entityType = entityType;
|
||||
this.targetType = targetType;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
@@ -3256,22 +3216,27 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public T doWith(Document document) {
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
if (document == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, projection.getMappedType().getType(), collectionName));
|
||||
Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? entityType
|
||||
: targetType;
|
||||
|
||||
Object entity = mongoConverter.project(projection, document);
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", mongoConverter));
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
|
||||
if (result != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
result = maybeCallAfterConvert(result, document, collectionName);
|
||||
}
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
return (T) maybeCallAfterConvert(entity, document, collectionName);
|
||||
return (T) result;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3408,7 +3373,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.metric = metric;
|
||||
}
|
||||
|
||||
public GeoResult<T> doWith(Document object) {
|
||||
@Nullable
|
||||
public GeoResult<T> doWith(@Nullable Document object) {
|
||||
|
||||
double distance = Double.NaN;
|
||||
if (object.containsKey(distanceField)) {
|
||||
@@ -3435,6 +3401,10 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}.
|
||||
*
|
||||
* @param cursor
|
||||
* @param exceptionTranslator
|
||||
* @param objectReadCallback
|
||||
*/
|
||||
CloseableIterableCursorAdapter(MongoIterable<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DocumentCallback<T> objectReadCallback) {
|
||||
@@ -3478,7 +3448,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
try {
|
||||
Document item = cursor.next();
|
||||
return objectReadCallback.doWith(item);
|
||||
T converted = objectReadCallback.doWith(item);
|
||||
return converted;
|
||||
} catch (RuntimeException ex) {
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
}
|
||||
@@ -3524,7 +3495,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
/**
|
||||
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
|
||||
* server through the driver API.
|
||||
* <br />
|
||||
* <p />
|
||||
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired
|
||||
* target method matching the actual arguments plus a {@link ClientSession}.
|
||||
*
|
||||
|
||||
@@ -16,19 +16,18 @@
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.util.Predicates;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.ProjectionInformation;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on properties of an entity like extracting fields information for projection creation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
class PropertyOperations {
|
||||
@@ -41,37 +40,37 @@ class PropertyOperations {
|
||||
|
||||
/**
|
||||
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
|
||||
* creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a
|
||||
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
|
||||
* {@literal closed interface projection}.
|
||||
*
|
||||
* @param projection must not be {@literal null}.
|
||||
* @param projectionFactory must not be {@literal null}.
|
||||
* @param fields must not be {@literal null}.
|
||||
* @param domainType must not be {@literal null}.
|
||||
* @param targetType must not be {@literal null}.
|
||||
* @return {@link Document} with fields to be included.
|
||||
*/
|
||||
Document computeMappedFieldsForProjection(EntityProjection<?, ?> projection,
|
||||
Document fields) {
|
||||
Document computeFieldsForProjection(ProjectionFactory projectionFactory, Document fields, Class<?> domainType,
|
||||
Class<?> targetType) {
|
||||
|
||||
if (!projection.isClosedProjection()) {
|
||||
if (!fields.isEmpty() || ClassUtils.isAssignable(domainType, targetType)) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = new Document();
|
||||
|
||||
if (projection.getMappedType().getType().isInterface()) {
|
||||
projection.forEach(it -> {
|
||||
projectedFields.put(it.getPropertyPath().getSegment(), 1);
|
||||
});
|
||||
if (targetType.isInterface()) {
|
||||
|
||||
ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType);
|
||||
|
||||
if (projectionInformation.isClosed()) {
|
||||
projectionInformation.getInputProperties().forEach(it -> projectedFields.append(it.getName(), 1));
|
||||
}
|
||||
} else {
|
||||
|
||||
// DTO projections use merged metadata between domain type and result type
|
||||
PersistentPropertyTranslator translator = PersistentPropertyTranslator.create(
|
||||
mappingContext.getRequiredPersistentEntity(projection.getDomainType()),
|
||||
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName));
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getRequiredPersistentEntity(projection.getMappedType());
|
||||
for (MongoPersistentProperty property : persistentEntity) {
|
||||
projectedFields.put(translator.translate(property).getFieldName(), 1);
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(targetType);
|
||||
if (entity != null) {
|
||||
entity.doWithProperties(
|
||||
(SimplePropertyHandler) persistentProperty -> projectedFields.append(persistentProperty.getName(), 1));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,6 @@ import java.util.stream.Collectors;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -55,10 +54,11 @@ import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.model.CountOptions;
|
||||
@@ -288,59 +288,45 @@ class QueryOperations {
|
||||
return queryMapper.getMappedObject(getQueryObject(), entity);
|
||||
}
|
||||
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
|
||||
Document fields = evaluateFields(entity);
|
||||
Document fields = new Document();
|
||||
|
||||
if (entity == null) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document mappedFields;
|
||||
if (!fields.isEmpty()) {
|
||||
mappedFields = queryMapper.getMappedFields(fields, entity);
|
||||
} else {
|
||||
mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields);
|
||||
mappedFields = queryMapper.addMetaAttributes(mappedFields, entity);
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName())
|
||||
&& !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
if (mappedFields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
private Document evaluateFields(@Nullable MongoPersistentEntity<?> entity) {
|
||||
|
||||
Document fields = query.getFieldsObject();
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
Document evaluated = new Document();
|
||||
|
||||
for (Entry<String, Object> entry : fields.entrySet()) {
|
||||
for (Entry<String, Object> entry : query.getFieldsObject().entrySet()) {
|
||||
|
||||
if (entry.getValue() instanceof MongoExpression) {
|
||||
|
||||
AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
|
||||
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);
|
||||
|
||||
evaluated.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
|
||||
} else {
|
||||
evaluated.put(entry.getKey(), entry.getValue());
|
||||
fields.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
return evaluated;
|
||||
Document mappedFields = fields;
|
||||
|
||||
if (entity == null) {
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields, entity);
|
||||
} else {
|
||||
mappedFields = queryMapper.getMappedFields(projectedFields,
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
return mappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -402,8 +388,8 @@ class QueryOperations {
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity,
|
||||
EntityProjection<?, ?> projection) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
|
||||
@@ -71,7 +71,7 @@ public interface ReactiveChangeStreamOperation {
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
|
||||
@@ -91,10 +91,10 @@ public interface ReactiveFindOperation {
|
||||
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
|
||||
* not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
|
||||
* document at the "end" of the collection and then the application deletes that document.
|
||||
* <br />
|
||||
* <p />
|
||||
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the
|
||||
* streams will linger and exhaust resources. <br/>
|
||||
* <strong>NOTE:</strong> Requires a capped collection.
|
||||
@@ -106,7 +106,7 @@ public interface ReactiveFindOperation {
|
||||
|
||||
/**
|
||||
* Get the number of matching elements.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
|
||||
|
||||
@@ -59,7 +59,7 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
|
||||
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
|
||||
* {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -121,7 +121,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -133,7 +133,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -145,7 +145,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -159,7 +159,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -178,7 +178,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession}
|
||||
* with given {@literal sessionOptions} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -192,7 +192,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the
|
||||
* {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -205,7 +205,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -218,7 +218,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of
|
||||
* {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command
|
||||
* issued against MongoDB.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
@@ -233,7 +233,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and
|
||||
* bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against
|
||||
* MongoDB.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
@@ -293,7 +293,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection.
|
||||
@@ -303,7 +303,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -313,7 +313,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -323,7 +323,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -332,7 +332,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -341,10 +341,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a {@link Flux} of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -355,10 +355,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a {@link Flux} of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -371,10 +371,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -388,10 +388,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -435,10 +435,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -451,10 +451,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -566,10 +566,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given entity class and are returned as stream. The name of the
|
||||
* inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
|
||||
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
|
||||
* {@link IllegalArgumentException}.
|
||||
@@ -584,10 +584,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the
|
||||
* {@code inputType}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
|
||||
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
|
||||
* {@link IllegalArgumentException}.
|
||||
@@ -604,9 +604,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
|
||||
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
|
||||
* {@link IllegalArgumentException}.
|
||||
@@ -676,7 +676,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -691,7 +691,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -707,7 +707,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -725,7 +725,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -746,7 +746,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
@@ -764,7 +764,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
@@ -783,7 +783,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -803,7 +803,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -825,7 +825,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -849,7 +849,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -876,7 +876,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account. <br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -902,9 +902,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
* database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -918,10 +918,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type. The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -940,7 +940,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -962,7 +962,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -983,7 +983,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* <p />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
@@ -1001,7 +1001,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
|
||||
* based on collection statistics.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1017,7 +1017,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Estimate the number of documents in the given collection based on collection statistics.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
|
||||
* transactions.
|
||||
*
|
||||
@@ -1029,17 +1029,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1050,12 +1050,12 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1094,15 +1094,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Insert the object into the collection for the entity type of the object to save.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1140,16 +1140,16 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
@@ -1161,14 +1161,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
@@ -1180,14 +1181,15 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation"> Spring's Type Conversion</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
@@ -1197,16 +1199,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collReactiveMongoOperationsection. Must not be {@literal null}.
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
*/
|
||||
@@ -1478,10 +1481,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
|
||||
* stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1497,10 +1500,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
|
||||
* stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1517,10 +1520,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to
|
||||
* filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <br />
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
@@ -1541,10 +1544,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter
|
||||
* events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <br />
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
@@ -1566,10 +1569,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Subscribe to a MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> via the reactive
|
||||
* infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed
|
||||
* unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
|
||||
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
|
||||
* <br />
|
||||
* <p />
|
||||
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
|
||||
* for resuming change streams.
|
||||
*
|
||||
|
||||
@@ -17,35 +17,25 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.function.Tuple2;
|
||||
import reactor.util.function.Tuples;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.reactivestreams.Subscriber;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
@@ -61,7 +51,6 @@ import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -111,9 +100,9 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -165,7 +154,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
public static final DbRefResolver NO_OP_REF_RESOLVER = NoOpDbRefResolver.INSTANCE;
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(ReactiveMongoTemplate.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class);
|
||||
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
@@ -175,6 +164,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final JsonSchemaMapper schemaMapper;
|
||||
private final SpelAwareProxyProjectionFactory projectionFactory;
|
||||
private final ApplicationListener<MappingContextEvent<?, ?>> indexCreatorListener;
|
||||
private final EntityOperations operations;
|
||||
private final PropertyOperations propertyOperations;
|
||||
@@ -241,12 +231,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
this.queryMapper = new QueryMapper(this.mongoConverter);
|
||||
this.updateMapper = new UpdateMapper(this.mongoConverter);
|
||||
this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter);
|
||||
this.projectionFactory = new SpelAwareProxyProjectionFactory();
|
||||
this.indexCreatorListener = new IndexCreatorEventListener(subscriptionExceptionHandler);
|
||||
|
||||
// We always have a mapping context in the converter, whether it's a simple one or not
|
||||
this.mappingContext = this.mongoConverter.getMappingContext();
|
||||
this.operations = new EntityOperations(this.mongoConverter);
|
||||
this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext());
|
||||
this.operations = new EntityOperations(this.mappingContext);
|
||||
this.propertyOperations = new PropertyOperations(this.mappingContext);
|
||||
this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations,
|
||||
mongoDatabaseFactory);
|
||||
|
||||
@@ -274,6 +265,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
this.queryMapper = that.queryMapper;
|
||||
this.updateMapper = that.updateMapper;
|
||||
this.schemaMapper = that.schemaMapper;
|
||||
this.projectionFactory = that.projectionFactory;
|
||||
this.indexCreator = that.indexCreator;
|
||||
this.indexCreatorListener = that.indexCreatorListener;
|
||||
this.mappingContext = that.mappingContext;
|
||||
@@ -350,13 +342,16 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
if (mappingContext instanceof ApplicationEventPublisherAware) {
|
||||
((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher);
|
||||
}
|
||||
|
||||
projectionFactory.setBeanFactory(applicationContext);
|
||||
projectionFactory.setBeanClassLoader(applicationContext.getClassLoader());
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link ReactiveEntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the
|
||||
* {@link ReactiveBeforeSaveCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Overrides potentially existing {@link ReactiveEntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
@@ -660,7 +655,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass) {
|
||||
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
|
||||
return createCollection(entityClass, CollectionOptions.empty());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -823,7 +818,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
.projection(new Document("_id", 1));
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("exists: %s in collection: %s", serializeToJsonSafely(filter), collectionName));
|
||||
LOGGER.debug("exists: {} in collection: {}", serializeToJsonSafely(filter), collectionName);
|
||||
}
|
||||
|
||||
queryContext.applyCollation(entityClass, findPublisher::collation);
|
||||
@@ -905,8 +900,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
Flux<?> result = execute(collectionName, collection -> {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), field, collectionName));
|
||||
LOGGER.debug("Executing findDistinct using query {} for field: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), field, collectionName);
|
||||
}
|
||||
|
||||
FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass);
|
||||
@@ -969,8 +964,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
return doAggregate(aggregation, collectionName, null, outputType);
|
||||
}
|
||||
|
||||
protected <O> Flux<O> doAggregate(Aggregation aggregation, String collectionName, @Nullable Class<?> inputType,
|
||||
Class<O> outputType) {
|
||||
protected <O> Flux<O> doAggregate(Aggregation aggregation, String collectionName, @Nullable Class<?> inputType, Class<O> outputType) {
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
@@ -982,18 +976,19 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Streaming aggregation: %s in collection %s",
|
||||
serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName));
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName);
|
||||
}
|
||||
|
||||
ReadDocumentCallback<O> readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName);
|
||||
return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(),
|
||||
ctx.isOutOrMerge(), options, readCallback, ctx.getInputType()));
|
||||
return execute(collectionName,
|
||||
collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), ctx.isOutOrMerge(), options,
|
||||
readCallback,
|
||||
ctx.getInputType()));
|
||||
}
|
||||
|
||||
private <O> Flux<O> aggregateAndMap(MongoCollection<Document> collection, List<Document> pipeline,
|
||||
boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback<O> readCallback,
|
||||
@Nullable Class<?> inputType) {
|
||||
boolean isOutOrMerge,
|
||||
AggregationOptions options, ReadDocumentCallback<O> readCallback, @Nullable Class<?> inputType) {
|
||||
|
||||
AggregatePublisher<Document> cursor = collection.aggregate(pipeline, Document.class)
|
||||
.allowDiskUse(options.isAllowDiskUse());
|
||||
@@ -1052,11 +1047,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(entityClass);
|
||||
String distanceField = operations.nearQueryDistanceFieldName(entityClass);
|
||||
EntityProjection<T, ?> projection = operations.introspectProjection(returnType,
|
||||
entityClass);
|
||||
|
||||
GeoNearResultDocumentCallback<T> callback = new GeoNearResultDocumentCallback<>(distanceField,
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric());
|
||||
new ProjectingReadCallback<>(mongoConverter, entityClass, returnType, collection), near.getMetric());
|
||||
|
||||
Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField))
|
||||
.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
|
||||
@@ -1135,11 +1128,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityType);
|
||||
QueryContext queryContext = queryOperations.createQueryContext(query);
|
||||
EntityProjection<T, S> projection = operations.introspectProjection(resultType,
|
||||
entityType);
|
||||
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, projection);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, resultType, projectionFactory);
|
||||
Document mappedSort = queryContext.getMappedSort(entity);
|
||||
|
||||
return Mono.defer(() -> {
|
||||
@@ -1159,8 +1150,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
}).flatMap(it -> {
|
||||
|
||||
Mono<T> afterFindAndReplace = doFindAndReplace(it.getCollection(), mappedQuery, mappedFields, mappedSort,
|
||||
queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), options,
|
||||
projection);
|
||||
queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), options, resultType);
|
||||
return afterFindAndReplace.flatMap(saved -> {
|
||||
maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection()));
|
||||
return maybeCallAfterSave(saved, it.getTarget(), it.getCollection());
|
||||
@@ -1225,8 +1215,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
Document filter = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(
|
||||
String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName));
|
||||
LOGGER.debug("Executing count: {} in collection: {}", serializeToJsonSafely(filter), collectionName);
|
||||
}
|
||||
|
||||
return doCount(collectionName, filter, options);
|
||||
@@ -1556,8 +1545,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
protected Mono<Object> insertDocument(String collectionName, Document dbDoc, Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String
|
||||
.format("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName));
|
||||
LOGGER.debug("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName);
|
||||
}
|
||||
|
||||
Document document = new Document(dbDoc);
|
||||
@@ -1583,7 +1571,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
}
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Inserting list of Documents containing %d items", dbDocList.size()));
|
||||
LOGGER.debug("Inserting list of Documents containing " + dbDocList.size() + " items");
|
||||
}
|
||||
|
||||
List<Document> documents = new ArrayList<>();
|
||||
@@ -1621,7 +1609,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
protected Mono<Object> saveDocument(String collectionName, Document document, Class<?> entityClass) {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Saving Document containing fields: %s", document.keySet()));
|
||||
LOGGER.debug("Saving Document containing fields: " + document.keySet());
|
||||
}
|
||||
|
||||
return createMono(collectionName, collection -> {
|
||||
@@ -1748,8 +1736,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
if (query.isSorted() && LOGGER.isWarnEnabled()) {
|
||||
|
||||
LOGGER.warn(String.format("%s does not support sort ('%s'). Please use findAndModify() instead.",
|
||||
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject())));
|
||||
LOGGER.warn("{} does not support sort ('{}'). Please use findAndModify() instead.",
|
||||
upsert ? "Upsert" : "UpdateFirst", serializeToJsonSafely(query.getSortObject()));
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = entityClass == null ? null : getPersistentEntity(entityClass);
|
||||
@@ -1960,8 +1948,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
MongoCollection<Document> collectionToUse = prepareCollection(collection, writeConcernToUse);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery),
|
||||
collectionName));
|
||||
LOGGER.debug("Remove using query: {} in collection: {}.",
|
||||
new Object[] { serializeToJsonSafely(removeQuery), collectionName });
|
||||
}
|
||||
|
||||
if (query.getLimit() > 0 || query.getSkip() > 0) {
|
||||
@@ -2329,7 +2317,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
// TODO: Emit a collection created event
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("Created collection [%s]", collectionName));
|
||||
LOGGER.debug("Created collection [{}]", collectionName);
|
||||
}
|
||||
|
||||
}).then(getCollection(collectionName));
|
||||
@@ -2372,8 +2360,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
QueryContext queryContext = queryOperations
|
||||
.createQueryContext(new BasicQuery(query, fields != null ? fields : new Document()));
|
||||
Document mappedFields = queryContext.getMappedFields(entity,
|
||||
EntityProjection.nonProjecting(entityClass));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
@@ -2425,8 +2412,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(entityClass);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity,
|
||||
EntityProjection.nonProjecting(entityClass));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, entityClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
@@ -2448,20 +2434,35 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
Class<T> targetClass, FindPublisherPreparer preparer) {
|
||||
|
||||
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(sourceClass);
|
||||
EntityProjection<T, S> projection = operations.introspectProjection(targetClass,
|
||||
sourceClass);
|
||||
|
||||
QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields));
|
||||
Document mappedFields = queryContext.getMappedFields(entity, projection);
|
||||
Document mappedFields = queryContext.getMappedFields(entity, targetClass, projectionFactory);
|
||||
Document mappedQuery = queryContext.getMappedQuery(entity);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName));
|
||||
LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName);
|
||||
}
|
||||
|
||||
return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer,
|
||||
new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName);
|
||||
new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName);
|
||||
}
|
||||
|
||||
private Document getMappedFieldsObject(Document fields, @Nullable MongoPersistentEntity<?> entity,
|
||||
Class<?> targetType) {
|
||||
|
||||
if (entity == null) {
|
||||
return fields;
|
||||
}
|
||||
|
||||
Document projectedFields = propertyOperations.computeFieldsForProjection(projectionFactory, fields,
|
||||
entity.getType(), targetType);
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(fields, projectedFields)) {
|
||||
return queryMapper.getMappedFields(projectedFields, entity);
|
||||
}
|
||||
|
||||
return queryMapper.getMappedFields(projectedFields, mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) {
|
||||
@@ -2494,20 +2495,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
result.validationOptions(validationOptions);
|
||||
});
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(operations.forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> {
|
||||
|
||||
TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField());
|
||||
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
options.metaField(it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase()));
|
||||
}
|
||||
|
||||
result.timeSeriesOptions(options);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -2525,7 +2512,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
|
||||
* The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query document is specified as a standard Document and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
@@ -2596,46 +2583,19 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
Document mappedSort, com.mongodb.client.model.Collation collation, Class<?> entityType, Document replacement,
|
||||
FindAndReplaceOptions options, Class<T> resultType) {
|
||||
|
||||
EntityProjection<T, ?> projection = operations.introspectProjection(resultType,
|
||||
entityType);
|
||||
|
||||
return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, collation, entityType, replacement,
|
||||
options, projection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Customize this part for findAndReplace.
|
||||
*
|
||||
* @param collectionName The name of the collection to perform the operation in.
|
||||
* @param mappedQuery the query to look up documents.
|
||||
* @param mappedFields the fields to project the result to.
|
||||
* @param mappedSort the sort to be applied when executing the query.
|
||||
* @param collation collation settings for the query. Can be {@literal null}.
|
||||
* @param entityType the source domain type.
|
||||
* @param replacement the replacement {@link Document}.
|
||||
* @param options applicable options.
|
||||
* @param projection the projection descriptor.
|
||||
* @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is
|
||||
* {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}.
|
||||
* @since 3.4
|
||||
*/
|
||||
private <T> Mono<T> doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields,
|
||||
Document mappedSort, com.mongodb.client.model.Collation collation, Class<?> entityType, Document replacement,
|
||||
FindAndReplaceOptions options, EntityProjection<T, ?> projection) {
|
||||
|
||||
return Mono.defer(() -> {
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug(String.format(
|
||||
"findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s "
|
||||
+ "in collection: %s",
|
||||
LOGGER.debug(
|
||||
"findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} "
|
||||
+ "in collection: {}",
|
||||
serializeToJsonSafely(mappedQuery), mappedFields, mappedSort, entityType,
|
||||
serializeToJsonSafely(replacement), collectionName));
|
||||
serializeToJsonSafely(replacement), collectionName);
|
||||
}
|
||||
|
||||
return executeFindOneInternal(
|
||||
new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options),
|
||||
new ProjectingReadCallback<>(this.mongoConverter, projection, collectionName), collectionName);
|
||||
new ProjectingReadCallback<>(this.mongoConverter, entityType, resultType, collectionName), collectionName);
|
||||
|
||||
});
|
||||
}
|
||||
@@ -2913,9 +2873,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
|
||||
LOGGER.debug(
|
||||
String.format("findOne using query: %s fields: %s in db.collection: %s", serializeToJsonSafely(query),
|
||||
serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()));
|
||||
LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query),
|
||||
serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName());
|
||||
}
|
||||
|
||||
FindPublisher<Document> publisher = preparer.initiateFind(collection, col -> col.find(query, Document.class));
|
||||
@@ -3193,14 +3152,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
|
||||
T entity = reader.read(type, document);
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
T source = reader.read(type, document);
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
return maybeCallAfterConvert(source, document, collectionName);
|
||||
}
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
return maybeCallAfterConvert(entity, document, collectionName);
|
||||
return Mono.empty();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3216,32 +3174,38 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
*/
|
||||
private class ProjectingReadCallback<S, T> implements DocumentCallback<T> {
|
||||
|
||||
private final MongoConverter reader;
|
||||
private final EntityProjection<T, S> projection;
|
||||
private final EntityReader<Object, Bson> reader;
|
||||
private final Class<S> entityType;
|
||||
private final Class<T> targetType;
|
||||
private final String collectionName;
|
||||
|
||||
ProjectingReadCallback(MongoConverter reader, EntityProjection<T, S> projection,
|
||||
ProjectingReadCallback(EntityReader<Object, Bson> reader, Class<S> entityType, Class<T> targetType,
|
||||
String collectionName) {
|
||||
this.reader = reader;
|
||||
this.projection = projection;
|
||||
this.entityType = entityType;
|
||||
this.targetType = targetType;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public Mono<T> doWith(Document document) {
|
||||
|
||||
Class<T> returnType = projection.getMappedType().getType();
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName));
|
||||
Class<?> typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) //
|
||||
? entityType //
|
||||
: targetType;
|
||||
|
||||
Object entity = reader.project(projection, document);
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName));
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
|
||||
T castEntity = (T) result;
|
||||
if (castEntity != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
|
||||
return maybeCallAfterConvert(castEntity, document, collectionName);
|
||||
}
|
||||
|
||||
T castEntity = (T) entity;
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
|
||||
return maybeCallAfterConvert(castEntity, document, collectionName);
|
||||
return Mono.empty();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3399,7 +3363,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
|
||||
/**
|
||||
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
|
||||
* server through the driver API.
|
||||
* <br />
|
||||
* <p />
|
||||
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired
|
||||
* target method matching the actual arguments plus a {@link ClientSession}.
|
||||
*
|
||||
|
||||
@@ -32,7 +32,7 @@ public interface ReactiveSessionCallback<T> {
|
||||
/**
|
||||
* Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is
|
||||
* inferred directly into the operation so that no further interaction is necessary.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and
|
||||
* others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway
|
||||
* objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or
|
||||
|
||||
@@ -33,7 +33,7 @@ public interface ReactiveSessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
@@ -47,7 +47,7 @@ public interface ReactiveSessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
|
||||
@@ -23,7 +23,7 @@ import org.springframework.lang.Nullable;
|
||||
|
||||
|
||||
/**
|
||||
* Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions.
|
||||
* Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
@@ -72,10 +72,10 @@ public interface ScriptOperations {
|
||||
Object call(String scriptName, Object... args);
|
||||
|
||||
/**
|
||||
* Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name.
|
||||
* Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name.
|
||||
*
|
||||
* @param scriptName must not be {@literal null} or empty.
|
||||
* @return false if no {@literal ServerSideJavaScript} with given name exists.
|
||||
* @return false if no {@link ServerSideJavaScript} with given name exists.
|
||||
*/
|
||||
boolean exists(String scriptName);
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@ public interface SessionCallback<T> {
|
||||
/**
|
||||
* Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred
|
||||
* directly into the operation so that no further interaction is necessary.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others
|
||||
* are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like
|
||||
* {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg.
|
||||
|
||||
@@ -23,7 +23,7 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -34,7 +34,7 @@ public interface SessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
@@ -49,7 +49,7 @@ public interface SessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
|
||||
@@ -142,118 +142,11 @@ public class AccumulatorOperators {
|
||||
return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the population covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(String fieldReference) {
|
||||
return covariancePop().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the population covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(AggregationExpression expression) {
|
||||
return covariancePop().and(expression);
|
||||
}
|
||||
|
||||
private CovariancePop covariancePop() {
|
||||
return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(String fieldReference) {
|
||||
return covarianceSamp().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(AggregationExpression expression) {
|
||||
return covarianceSamp().and(expression);
|
||||
}
|
||||
|
||||
private CovarianceSamp covarianceSamp() {
|
||||
return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference)
|
||||
: CovarianceSamp.covarianceSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates
|
||||
* the exponential moving average of numeric values
|
||||
*
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public ExpMovingAvgBuilder expMovingAvg() {
|
||||
|
||||
ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference)
|
||||
: ExpMovingAvg.expMovingAvgOf(expression);
|
||||
return new ExpMovingAvgBuilder() {
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) {
|
||||
return expMovingAvg.n(numberOfHistoricalDocuments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return expMovingAvg.alpha(exponentialDecayValue);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ExpMovingAvg}.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ExpMovingAvgBuilder {
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight.
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments);
|
||||
|
||||
/**
|
||||
* Define the exponential decay value.
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg alpha(double exponentialDecayValue);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $sum}.
|
||||
*
|
||||
@@ -765,185 +658,4 @@ public class AccumulatorOperators {
|
||||
return super.toDocument(value, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covariancePop}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovariancePop extends AbstractAggregationExpression {
|
||||
|
||||
private CovariancePop(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovariancePop(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(AggregationExpression expression) {
|
||||
return new CovariancePop(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(String fieldReference) {
|
||||
return new CovariancePop(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(AggregationExpression expression) {
|
||||
return new CovariancePop(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covariancePop";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covarianceSamp}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovarianceSamp extends AbstractAggregationExpression {
|
||||
|
||||
private CovarianceSamp(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovarianceSamp(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(AggregationExpression expression) {
|
||||
return new CovarianceSamp(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(String fieldReference) {
|
||||
return new CovarianceSamp(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(AggregationExpression expression) {
|
||||
return new CovarianceSamp(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covarianceSamp";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ExpMovingAvg} calculates the exponential moving average of numeric values.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class ExpMovingAvg extends AbstractAggregationExpression {
|
||||
|
||||
private ExpMovingAvg(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(String fieldReference) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value
|
||||
* to be used as input.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight. <br />
|
||||
* Specify either {@link #n(int) N} or {@link #alpha(double) aplha}. Not both!
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) {
|
||||
return new ExpMovingAvg(append("N", numberOfHistoricalDocuments));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the exponential decay value. <br />
|
||||
* Specify either {@link #alpha(double) aplha} or {@link #n(int) N}. Not both!
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return new ExpMovingAvg(append("alpha", exponentialDecayValue));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$expMovingAvg";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,5 +201,4 @@ public class AddFieldsOperation extends DocumentEnhancingOperation {
|
||||
AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -227,7 +227,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is
|
||||
* an alias for {@code $addFields}.
|
||||
*
|
||||
@@ -499,17 +499,6 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link MatchOperation}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static MatchOperation match(AggregationExpression expression) {
|
||||
return new MatchOperation(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
@@ -726,7 +715,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link Document}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render
|
||||
* an aggregation pipeline.
|
||||
*
|
||||
|
||||
@@ -24,15 +24,15 @@ import org.springframework.util.Assert;
|
||||
* expression</a>. <br />
|
||||
* <br />
|
||||
* <strong>Samples:</strong> <br />
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre>
|
||||
* // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
|
||||
* expressionOf("qty > 100 && qty < 250);
|
||||
*
|
||||
* // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
|
||||
* expressionOf("cond(a >= 42, 'answer', 'no-answer')");
|
||||
* </code>
|
||||
* </pre>
|
||||
* </code>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -71,7 +71,8 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline">MongoDB
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline">MongoDB
|
||||
* Reference Documentation</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -28,7 +28,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating
|
||||
* instances of this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/">https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/">https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/</a>
|
||||
* @see BucketOperationSupport
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -247,7 +248,8 @@ public class BucketAutoOperation extends BucketOperationSupport<BucketAutoOperat
|
||||
/**
|
||||
* Supported MongoDB granularities.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity">https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity</a>
|
||||
* @see <a
|
||||
* href="https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity>https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity</a>
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public enum Granularities implements Granularity {
|
||||
|
||||
@@ -31,7 +31,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of
|
||||
* this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.org/manual/reference/aggregation/bucket/">https://docs.mongodb.org/manual/reference/aggregation/bucket/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/reference/aggregation/bucket/">https://docs.mongodb.org/manual/reference/aggregation/bucket/</a>
|
||||
* @see BucketOperationSupport
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -236,7 +235,7 @@ public class ConditionalOperators {
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
*/
|
||||
public static class IfNull implements AggregationExpression {
|
||||
|
||||
@@ -252,8 +251,7 @@ public class ConditionalOperators {
|
||||
/**
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(String fieldReference) {
|
||||
@@ -266,7 +264,7 @@ public class ConditionalOperators {
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param expression the expression to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(AggregationExpression expression) {
|
||||
@@ -284,29 +282,19 @@ public class ConditionalOperators {
|
||||
|
||||
List<Object> list = new ArrayList<Object>();
|
||||
|
||||
if (condition instanceof Collection) {
|
||||
for (Object val : ((Collection) this.condition)) {
|
||||
list.add(mapCondition(val, context));
|
||||
}
|
||||
if (condition instanceof Field) {
|
||||
list.add(context.getReference((Field) condition).toString());
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
list.add(((AggregationExpression) condition).toDocument(context));
|
||||
} else {
|
||||
list.add(mapCondition(condition, context));
|
||||
list.add(condition);
|
||||
}
|
||||
|
||||
list.add(resolve(value, context));
|
||||
|
||||
return new Document("$ifNull", list);
|
||||
}
|
||||
|
||||
private Object mapCondition(Object condition, AggregationOperationContext context) {
|
||||
|
||||
if (condition instanceof Field) {
|
||||
return context.getReference((Field) condition).toString();
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) condition).toDocument(context);
|
||||
} else {
|
||||
return condition;
|
||||
}
|
||||
}
|
||||
|
||||
private Object resolve(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
@@ -327,48 +315,28 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder ifNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value, field name must not be {@literal null}
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}.
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder ifNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface OrBuilder {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder orIfNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value,
|
||||
* @return the {@link ThenBuilder}.
|
||||
*/
|
||||
ThenBuilder orIfNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public interface ThenBuilder extends OrBuilder {
|
||||
public interface ThenBuilder {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* @return new instance of {@link IfNull}.
|
||||
*/
|
||||
IfNull then(Object value);
|
||||
@@ -393,10 +361,9 @@ public class ConditionalOperators {
|
||||
*/
|
||||
static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder {
|
||||
|
||||
private @Nullable List<Object> conditions;
|
||||
private @Nullable Object condition;
|
||||
|
||||
private IfNullOperatorBuilder() {
|
||||
conditions = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -414,7 +381,7 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(String fieldReference) {
|
||||
|
||||
Assert.hasText(fieldReference, "FieldReference name must not be null or empty!");
|
||||
this.conditions.add(Fields.field(fieldReference));
|
||||
this.condition = Fields.field(fieldReference);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -425,25 +392,15 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression name must not be null or empty!");
|
||||
this.conditions.add(expression);
|
||||
this.condition = expression;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(String fieldReference) {
|
||||
return ifNull(fieldReference);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(AggregationExpression expression) {
|
||||
return ifNull(expression);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object)
|
||||
*/
|
||||
public IfNull then(Object value) {
|
||||
return new IfNull(conditions, value);
|
||||
return new IfNull(condition, value);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -452,7 +409,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new IfNull(conditions, Fields.field(fieldReference));
|
||||
return new IfNull(condition, Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -461,7 +418,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new IfNull(conditions, expression);
|
||||
return new IfNull(condition, expression);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -501,7 +458,7 @@ public class ConditionalOperators {
|
||||
public static Switch switchCases(List<CaseOperator> conditions) {
|
||||
|
||||
Assert.notNull(conditions, "Conditions must not be null!");
|
||||
return new Switch(Collections.<String, Object> singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
return new Switch(Collections.<String, Object>singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -588,7 +545,7 @@ public class ConditionalOperators {
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
*/
|
||||
public static class Cond implements AggregationExpression {
|
||||
|
||||
@@ -849,8 +806,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link OtherwiseBuilder}
|
||||
*/
|
||||
OtherwiseBuilder then(Object value);
|
||||
@@ -875,8 +832,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link Cond}
|
||||
*/
|
||||
Cond otherwise(Object value);
|
||||
@@ -904,7 +861,8 @@ public class ConditionalOperators {
|
||||
private @Nullable Object condition;
|
||||
private @Nullable Object thenValue;
|
||||
|
||||
private ConditionalExpressionBuilder() {}
|
||||
private ConditionalExpressionBuilder() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new builder for {@link Cond}.
|
||||
|
||||
@@ -231,17 +231,6 @@ public class ConvertOperators {
|
||||
return ToString.toString(valueObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to
|
||||
* radians.
|
||||
*
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public DegreesToRadians convertDegreesToRadians() {
|
||||
return DegreesToRadians.degreesToRadians(valueObject());
|
||||
}
|
||||
|
||||
private Convert createConvert() {
|
||||
return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression);
|
||||
}
|
||||
@@ -328,9 +317,9 @@ public class ConvertOperators {
|
||||
* <dt>1</dt>
|
||||
* <dd>double</dd>
|
||||
* <dt>2</dt>
|
||||
* <dd>string</dd>
|
||||
* <dd>string</li>
|
||||
* <dt>7</dt>
|
||||
* <dd>objectId</dd>
|
||||
* <dd>objectId</li>
|
||||
* <dt>8</dt>
|
||||
* <dd>bool</dd>
|
||||
* <dt>9</dt>
|
||||
@@ -703,52 +692,4 @@ public class ConvertOperators {
|
||||
return "$toString";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DegreesToRadians extends AbstractAggregationExpression {
|
||||
|
||||
private DegreesToRadians(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians.
|
||||
*
|
||||
* @param fieldName must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(String fieldName) {
|
||||
return degreesToRadians(Fields.field(fieldName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) {
|
||||
return degreesToRadians(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadians(Object value) {
|
||||
return new DegreesToRadians(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$degreesToRadians";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,7 +24,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class
|
||||
* directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/count/#pipe._S_count">https://docs.mongodb.com/manual/reference/operator/aggregation/count/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/count/#pipe._S_count">https://docs.mongodb.com/manual/reference/operator/aggregation/count/</a>
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
*/
|
||||
|
||||
@@ -15,16 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.TimeZone;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -52,19 +45,6 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date referenced by given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateOperatorFactory}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new DateOperatorFactory(fieldReference).withTimezone(timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
@@ -77,22 +57,9 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the date resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateOperatorFactory}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new DateOperatorFactory(expression).withTimezone(timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the given value as date.
|
||||
* <br />
|
||||
* <p/>
|
||||
* This can be one of:
|
||||
* <ul>
|
||||
* <li>{@link java.util.Date}</li>
|
||||
@@ -142,7 +109,7 @@ public class DateOperators {
|
||||
* Timezone represents a MongoDB timezone abstraction which can be represented with a timezone ID or offset as a
|
||||
* {@link String}. Also accepts a {@link AggregationExpression} or {@link Field} that resolves to a {@link String} of
|
||||
* either Olson Timezone Identifier or a UTC Offset.<br />
|
||||
* <table>
|
||||
* <table valign="top">
|
||||
* <tr>
|
||||
* <th>Format</th>
|
||||
* <th>Example</th>
|
||||
@@ -163,7 +130,6 @@ public class DateOperators {
|
||||
* <strong>NOTE: </strong>Support for timezones in aggregations Requires MongoDB 3.6 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class Timezone {
|
||||
@@ -190,7 +156,7 @@ public class DateOperators {
|
||||
* representing an Olson Timezone Identifier or UTC Offset.
|
||||
*
|
||||
* @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an
|
||||
* {@link AggregationExpression} resulting in the timezone.
|
||||
* {@link AggregationExpression} resulting in the timezone.
|
||||
* @return new instance of {@link Timezone}.
|
||||
*/
|
||||
public static Timezone valueOf(Object value) {
|
||||
@@ -199,61 +165,6 @@ public class DateOperators {
|
||||
return new Timezone(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param timeZone {@link TimeZone} rendering the offset as UTC offset.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromOffset(TimeZone timeZone) {
|
||||
|
||||
Assert.notNull(timeZone, "TimeZone must not be null!");
|
||||
|
||||
return fromOffset(
|
||||
ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset()))));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param offset {@link ZoneOffset} rendering the offset as UTC offset.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromOffset(ZoneOffset offset) {
|
||||
|
||||
Assert.notNull(offset, "ZoneOffset must not be null!");
|
||||
return new Timezone(offset.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param timeZone {@link Timezone} rendering the offset as zone identifier.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromZone(TimeZone timeZone) {
|
||||
|
||||
Assert.notNull(timeZone, "TimeZone must not be null!");
|
||||
|
||||
return valueOf(timeZone.getID());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the given {@link java.time.ZoneId} rendering the offset as UTC offset.
|
||||
*
|
||||
* @param zoneId {@link ZoneId} rendering the offset as zone identifier.
|
||||
* @return new instance of {@link Timezone}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Timezone fromZone(ZoneId zoneId) {
|
||||
|
||||
Assert.notNull(zoneId, "ZoneId must not be null!");
|
||||
return new Timezone(zoneId.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset.
|
||||
*
|
||||
@@ -274,11 +185,6 @@ public class DateOperators {
|
||||
public static Timezone ofExpression(AggregationExpression expression) {
|
||||
return valueOf(expression);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
Object getValue() {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -334,7 +240,7 @@ public class DateOperators {
|
||||
|
||||
/**
|
||||
* Creates new {@link DateOperatorFactory} for given {@code value} that resolves to a Date.
|
||||
* <br />
|
||||
* <p/>
|
||||
* <ul>
|
||||
* <li>{@link java.util.Date}</li>
|
||||
* <li>{@link java.util.Calendar}</li>
|
||||
@@ -368,89 +274,6 @@ public class DateOperators {
|
||||
return new DateOperatorFactory(fieldReference, expression, dateValue, timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression
|
||||
* expression} (in {@literal units}).
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(AggregationExpression expression, String unit) {
|
||||
return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression
|
||||
* expression} (in {@literal units}).
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in
|
||||
* {@literal units}).
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(String fieldReference, String unit) {
|
||||
return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in
|
||||
* {@literal units}).
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateAdd addValueOf(String fieldReference, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the given value (in {@literal units}).
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.3 new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd add(Object value, String unit) {
|
||||
return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that adds the given value (in {@literal units}).
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return
|
||||
* @since 3.3 new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd add(Object value, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and
|
||||
* 366.
|
||||
@@ -481,90 +304,6 @@ public class DateOperators {
|
||||
return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date
|
||||
* computed by the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(AggregationExpression expression, String unit) {
|
||||
return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date
|
||||
* computed by the given {@link AggregationExpression expression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored
|
||||
* at the given {@literal field}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(String fieldReference, String unit) {
|
||||
return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored
|
||||
* at the given {@literal field}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(
|
||||
DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given
|
||||
* {@literal value}.
|
||||
*
|
||||
* @param value anything the resolves to a valid date. Must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diff(Object value, String unit) {
|
||||
return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given
|
||||
* {@literal value}.
|
||||
*
|
||||
* @param value anything the resolves to a valid date. Must not be {@literal null}.
|
||||
* @param unit the unit of measure. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}. @since 3.3
|
||||
*/
|
||||
public DateDiff diff(Object value, TemporalUnit unit) {
|
||||
|
||||
Assert.notNull(unit, "TemporalUnit must not be null");
|
||||
|
||||
return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()),
|
||||
timezone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that returns the year portion of a date.
|
||||
*
|
||||
@@ -1741,6 +1480,7 @@ public class DateOperators {
|
||||
} else {
|
||||
clone.put("timezone", ((Timezone) value).value);
|
||||
}
|
||||
|
||||
} else {
|
||||
clone.put(key, value);
|
||||
}
|
||||
@@ -2171,7 +1911,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts<DateFromParts> {
|
||||
@@ -2346,7 +2086,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class IsoDateFromParts extends TimezonedDateAggregationExpression
|
||||
@@ -2522,7 +2262,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateToParts extends TimezonedDateAggregationExpression {
|
||||
@@ -2603,7 +2343,7 @@ public class DateOperators {
|
||||
* @author Matt Morrissette
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/</a>
|
||||
* @since 2.1
|
||||
*/
|
||||
public static class DateFromString extends TimezonedDateAggregationExpression {
|
||||
@@ -2678,290 +2418,6 @@ public class DateOperators {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $dateAdd}.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 5.0 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DateAdd extends TimezonedDateAggregationExpression {
|
||||
|
||||
private DateAdd(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a
|
||||
* {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValueOf(AggregationExpression expression, String unit) {
|
||||
return addValue(expression, unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValueOf(String fieldReference, String unit) {
|
||||
return addValue(Fields.field(fieldReference), unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateAdd addValue(Object value, String unit) {
|
||||
|
||||
Map<String, Object> args = new HashMap<>();
|
||||
args.put("unit", unit);
|
||||
args.put("amount", value);
|
||||
return new DateAdd(args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDateOf(AggregationExpression expression) {
|
||||
return toDate(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDateOf(String fieldReference) {
|
||||
return toDate(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd toDate(Object dateExpression) {
|
||||
return new DateAdd(append("startDate", dateExpression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
|
||||
*
|
||||
* @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateAdd withTimezone(Timezone timezone) {
|
||||
return new DateAdd(appendTimezone(argumentMap(), timezone));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$dateAdd";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $dateDiff}.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 5.0 or later.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DateDiff extends TimezonedDateAggregationExpression {
|
||||
|
||||
private DateDiff(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a
|
||||
* {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValueOf(AggregationExpression expression, String unit) {
|
||||
return diffValue(expression, unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValueOf(String fieldReference, String unit) {
|
||||
return diffValue(Fields.field(fieldReference), unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add the number of {@literal units} to a {@link #toDate(Object) start date}.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param unit must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public static DateDiff diffValue(Object value, String unit) {
|
||||
|
||||
Map<String, Object> args = new HashMap<>();
|
||||
args.put("unit", unit);
|
||||
args.put("endDate", value);
|
||||
return new DateDiff(args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDateOf(AggregationExpression expression) {
|
||||
return toDate(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDateOf(String fieldReference) {
|
||||
return toDate(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the start date, in UTC, for the addition operation.
|
||||
*
|
||||
* @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff toDate(Object dateExpression) {
|
||||
return new DateDiff(append("startDate", dateExpression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used.
|
||||
*
|
||||
* @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead.
|
||||
* @return new instance of {@link DateAdd}.
|
||||
*/
|
||||
public DateDiff withTimezone(Timezone timezone) {
|
||||
return new DateDiff(appendTimezone(argumentMap(), timezone));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the start day of the week if the unit if measure is set to {@literal week}. Uses {@literal Sunday} by
|
||||
* default.
|
||||
*
|
||||
* @param day must not be {@literal null}.
|
||||
* @return new instance of {@link DateDiff}.
|
||||
*/
|
||||
public DateDiff startOfWeek(Object day) {
|
||||
return new DateDiff(append("startOfWeek", day));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$dateDiff";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface defining a temporal unit for date operators.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface TemporalUnit {
|
||||
|
||||
String name();
|
||||
|
||||
/**
|
||||
* Converts the given time unit into a {@link TemporalUnit}. Supported units are: days, hours, minutes, seconds, and
|
||||
* milliseconds.
|
||||
*
|
||||
* @param timeUnit the time unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static TemporalUnit from(TimeUnit timeUnit) {
|
||||
|
||||
Assert.notNull(timeUnit, "TimeUnit must not be null");
|
||||
|
||||
switch (timeUnit) {
|
||||
case DAYS:
|
||||
return TemporalUnits.DAY;
|
||||
case HOURS:
|
||||
return TemporalUnits.HOUR;
|
||||
case MINUTES:
|
||||
return TemporalUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return TemporalUnits.SECOND;
|
||||
case MILLISECONDS:
|
||||
return TemporalUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days,
|
||||
* hours, minutes, seconds, and millis.
|
||||
*
|
||||
* @param chronoUnit the chrono unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static TemporalUnit from(ChronoUnit chronoUnit) {
|
||||
|
||||
switch (chronoUnit) {
|
||||
case YEARS:
|
||||
return TemporalUnits.YEAR;
|
||||
case WEEKS:
|
||||
return TemporalUnits.WEEK;
|
||||
case MONTHS:
|
||||
return TemporalUnits.MONTH;
|
||||
case DAYS:
|
||||
return TemporalUnits.DAY;
|
||||
case HOURS:
|
||||
return TemporalUnits.HOUR;
|
||||
case MINUTES:
|
||||
return TemporalUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return TemporalUnits.SECOND;
|
||||
case MILLIS:
|
||||
return TemporalUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Supported temporal units.
|
||||
*/
|
||||
enum TemporalUnits implements TemporalUnit {
|
||||
YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T extends TimezonedDateAggregationExpression> T applyTimezone(T instance, Timezone timezone) {
|
||||
return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone()
|
||||
|
||||
@@ -1,222 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
/**
|
||||
* Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.}
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DocumentOperators {
|
||||
|
||||
/**
|
||||
* Obtain the document position (including gaps) relative to others (rank).
|
||||
*
|
||||
* @return new instance of {@link Rank}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static Rank rank() {
|
||||
return new Rank();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the document position (without gaps) relative to others (rank).
|
||||
*
|
||||
* @return new instance of {@link DenseRank}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DenseRank denseRank() {
|
||||
return new DenseRank();
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the field referenced by given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link DocumentOperatorsFactory}.
|
||||
*/
|
||||
public static DocumentOperatorsFactory valueOf(String fieldReference) {
|
||||
return new DocumentOperatorsFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DocumentOperatorsFactory}.
|
||||
*/
|
||||
public static DocumentOperatorsFactory valueOf(AggregationExpression expression) {
|
||||
return new DocumentOperatorsFactory(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the current document position.
|
||||
*
|
||||
* @return new instance of {@link DocumentNumber}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static DocumentNumber documentNumber() {
|
||||
return new DocumentNumber();
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentOperatorsFactory {
|
||||
|
||||
private final Object target;
|
||||
|
||||
public DocumentOperatorsFactory(Object target) {
|
||||
this.target = target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that applies the expression to a document at specified position
|
||||
* relative to the current document.
|
||||
*
|
||||
* @param by the value to add to the current position.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift shift(int by) {
|
||||
|
||||
Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString());
|
||||
return shift.by(by);
|
||||
}
|
||||
|
||||
private boolean usesExpression() {
|
||||
return target instanceof AggregationExpression;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents
|
||||
* occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class Rank implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$rank", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple
|
||||
* documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next
|
||||
* rank without any gaps.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DenseRank implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$denseRank", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentNumber} resolves the current document position.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DocumentNumber implements AggregationExpression {
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
return new Document("$documentNumber", new Document());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Shift applies an expression to a document in a specified position relative to the current document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class Shift extends AbstractAggregationExpression {
|
||||
|
||||
private Shift(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the field to evaluate and return.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public static Shift shift(String fieldReference) {
|
||||
return new Shift(Collections.singletonMap("output", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specifies the {@link AggregationExpression expression} to evaluate and return.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public static Shift shift(AggregationExpression expression) {
|
||||
return new Shift(Collections.singletonMap("output", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Shift the document position relative to the current. Use a positive value for follow up documents (eg. 1 for the
|
||||
* next) or a negative value for the predecessor documents (eg. -1 for the previous).
|
||||
*
|
||||
* @param shiftBy value to add to the current position.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift by(int shiftBy) {
|
||||
return new Shift(append("by", shiftBy));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the default value if the target document is out of range.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift defaultTo(Object value) {
|
||||
return new Shift(append("default", value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the {@link AggregationExpression expression} to evaluate if the target document is out of range.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Shift}.
|
||||
*/
|
||||
public Shift defaultToValueOf(AggregationExpression expression) {
|
||||
return defaultTo(expression);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$shift";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,155 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Gateway to {@literal evaluation operators} such as {@literal $expr}.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public class EvaluationOperators {
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given fieldReference.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link EvaluationOperatorFactory}.
|
||||
*/
|
||||
public static EvaluationOperatorFactory valueOf(String fieldReference) {
|
||||
return new EvaluationOperatorFactory(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Take the value resulting from the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link EvaluationOperatorFactory}.
|
||||
*/
|
||||
public static EvaluationOperatorFactory valueOf(AggregationExpression expression) {
|
||||
return new EvaluationOperatorFactory(expression);
|
||||
}
|
||||
|
||||
public static class EvaluationOperatorFactory {
|
||||
|
||||
private final String fieldReference;
|
||||
private final AggregationExpression expression;
|
||||
|
||||
/**
|
||||
* Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
*/
|
||||
public EvaluationOperatorFactory(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
this.fieldReference = fieldReference;
|
||||
this.expression = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
*/
|
||||
public EvaluationOperatorFactory(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
this.fieldReference = null;
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that is a valid aggregation expression.
|
||||
*
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public Expr expr() {
|
||||
return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows the use of aggregation expressions within the query language.
|
||||
*/
|
||||
public static class Expr extends AbstractAggregationExpression {
|
||||
|
||||
private Expr(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$expr";
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Expr}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public static Expr valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new Expr(Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link Expr}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link Expr}.
|
||||
*/
|
||||
public static Expr valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new Expr(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates {@code $expr} as {@link CriteriaDefinition}.
|
||||
*
|
||||
* @return the {@link CriteriaDefinition} from this expression.
|
||||
*/
|
||||
public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) {
|
||||
|
||||
Document criteriaObject = toDocument(context);
|
||||
|
||||
return new CriteriaDefinition() {
|
||||
@Override
|
||||
public Document getCriteriaObject() {
|
||||
return criteriaObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKey() {
|
||||
return getMongoMethod();
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -36,7 +36,8 @@ import org.springframework.util.ClassUtils;
|
||||
* We recommend to use the static factory method {@link Aggregation#graphLookup(String)} instead of creating instances
|
||||
* of this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.org/manual/reference/aggregation/graphLookup/">https://docs.mongodb.org/manual/reference/aggregation/graphLookup/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/reference/aggregation/graphLookup/">https://docs.mongodb.org/manual/reference/aggregation/graphLookup/</a>
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 1.10
|
||||
|
||||
@@ -139,7 +139,7 @@ public class GroupOperation implements FieldsExposingAggregationOperation {
|
||||
* Generates an {@link GroupOperationBuilder} for a {@code $sum}-expression.
|
||||
* <p>
|
||||
* Count expressions are emulated via {@code $sum: 1}.
|
||||
* </p>
|
||||
* <p>
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -30,7 +29,6 @@ import org.springframework.util.Assert;
|
||||
* @author Sebastian Herold
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Divya Srivastava
|
||||
* @since 1.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/match/">MongoDB Aggregation Framework:
|
||||
* $match</a>
|
||||
@@ -38,7 +36,6 @@ import org.springframework.util.Assert;
|
||||
public class MatchOperation implements AggregationOperation {
|
||||
|
||||
private final CriteriaDefinition criteriaDefinition;
|
||||
private final AggregationExpression expression;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}.
|
||||
@@ -48,23 +45,7 @@ public class MatchOperation implements AggregationOperation {
|
||||
public MatchOperation(CriteriaDefinition criteriaDefinition) {
|
||||
|
||||
Assert.notNull(criteriaDefinition, "Criteria must not be null!");
|
||||
|
||||
this.criteriaDefinition = criteriaDefinition;
|
||||
this.expression = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} for the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public MatchOperation(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
this.criteriaDefinition = null;
|
||||
this.expression = expression;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -73,9 +54,7 @@ public class MatchOperation implements AggregationOperation {
|
||||
*/
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
return new Document(getOperator(),
|
||||
context.getMappedObject(expression != null ? expression.toDocument() : criteriaDefinition.getCriteriaObject()));
|
||||
return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject()));
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
@@ -30,7 +30,7 @@ import org.springframework.lang.Nullable;
|
||||
/**
|
||||
* {@link AggregationOperationContext} implementation prefixing non-command keys on root level with the given prefix.
|
||||
* Useful when mapping fields to domain specific types while having to prefix keys for query purpose.
|
||||
* <br />
|
||||
* <p />
|
||||
* Fields to be excluded from prefixing my be added to a {@literal denylist}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -1803,9 +1803,8 @@ public class ProjectionOperation implements FieldsExposingAggregationOperation {
|
||||
Document projections = new Document();
|
||||
|
||||
Fields fields = context.getFields(type);
|
||||
|
||||
fields.forEach(it -> projections.append(it.getTarget(), 1));
|
||||
return projections;
|
||||
fields.forEach(it -> projections.append(it.getName(), 1));
|
||||
return context.getMappedObject(projections, type);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -33,7 +33,8 @@ import org.springframework.util.Assert;
|
||||
* </pre>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/redact/">https://docs.mongodb.com/manual/reference/operator/aggregation/redact/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/redact/">https://docs.mongodb.com/manual/reference/operator/aggregation/redact/</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
public class RedactOperation implements AggregationOperation {
|
||||
|
||||
@@ -21,7 +21,6 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField;
|
||||
import org.springframework.expression.spel.ast.Projection;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -30,7 +30,7 @@ import org.springframework.util.CollectionUtils;
|
||||
|
||||
/**
|
||||
* Gateway to {@literal $function} and {@literal $accumulator} aggregation operations.
|
||||
* <br />
|
||||
* <p />
|
||||
* Using {@link ScriptOperators} as part of the {@link Aggregation} requires MongoDB server to have
|
||||
* <a href="https://docs.mongodb.com/master/core/server-side-javascript/">server-side JavaScript</a> execution
|
||||
* <a href="https://docs.mongodb.com/master/reference/configuration-options/#security.javascriptEnabled">enabled</a>.
|
||||
@@ -43,7 +43,7 @@ public class ScriptOperators {
|
||||
|
||||
/**
|
||||
* Create a custom aggregation
|
||||
* <a href="https://docs.mongodb.com/master/reference/operator/aggregation/function/">$function</a> in JavaScript.
|
||||
* <a href="https://docs.mongodb.com/master/reference/operator/aggregation/function/">$function<a /> in JavaScript.
|
||||
*
|
||||
* @param body The function definition. Must not be {@literal null}.
|
||||
* @return new instance of {@link Function}.
|
||||
@@ -53,8 +53,8 @@ public class ScriptOperators {
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a custom <a href="https://docs.mongodb.com/master/reference/operator/aggregation/accumulator/">$accumulator operator</a>
|
||||
* in Javascript.
|
||||
* Create a custom <a href="https://docs.mongodb.com/master/reference/operator/aggregation/accumulator/">$accumulator
|
||||
* operator</a> in Javascript.
|
||||
*
|
||||
* @return new instance of {@link AccumulatorInitBuilder}.
|
||||
*/
|
||||
@@ -65,7 +65,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* {@link Function} defines a custom aggregation
|
||||
* <a href="https://docs.mongodb.com/master/reference/operator/aggregation/function/">$function</a> in JavaScript.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* {
|
||||
* $function: {
|
||||
@@ -75,7 +75,7 @@ public class ScriptOperators {
|
||||
* }
|
||||
* }
|
||||
* </code>
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link Function} cannot be used as part of {@link org.springframework.data.mongodb.core.schema.MongoJsonSchema
|
||||
* schema} validation query expression. <br />
|
||||
* <b>NOTE:</b> <a href="https://docs.mongodb.com/master/core/server-side-javascript/">Server-Side JavaScript</a>
|
||||
@@ -179,7 +179,7 @@ public class ScriptOperators {
|
||||
* <a href="https://docs.mongodb.com/master/reference/operator/aggregation/accumulator/">$accumulator operator</a>,
|
||||
* one that maintains its state (e.g. totals, maximums, minimums, and related data) as documents progress through the
|
||||
* pipeline, in JavaScript.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* {
|
||||
* $accumulator: {
|
||||
@@ -193,7 +193,7 @@ public class ScriptOperators {
|
||||
* }
|
||||
* }
|
||||
* </code>
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link Accumulator} can be used as part of {@link GroupOperation $group}, {@link BucketOperation $bucket} and
|
||||
* {@link BucketAutoOperation $bucketAuto} pipeline stages. <br />
|
||||
* <b>NOTE:</b> <a href="https://docs.mongodb.com/master/core/server-side-javascript/">Server-Side JavaScript</a>
|
||||
@@ -241,7 +241,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Define the {@code init} {@link Function} for the {@link Accumulator accumulators} initial state. The function
|
||||
* receives its arguments from the {@link Function#args(Object...) initArgs} array expression.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(initArg1, initArg2, ...) {
|
||||
* ...
|
||||
@@ -259,7 +259,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives
|
||||
* its arguments from the {@link AccumulatorInitArgsBuilder#initArgs(Object...)} array expression.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(initArg1, initArg2, ...) {
|
||||
* ...
|
||||
@@ -308,7 +308,7 @@ public class ScriptOperators {
|
||||
* Set the {@code accumulate} {@link Function} that updates the state for each document. The functions first
|
||||
* argument is the current {@code state}, additional arguments can be defined via {@link Function#args(Object...)
|
||||
* accumulateArgs}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state, accumArg1, accumArg2, ...) {
|
||||
* ...
|
||||
@@ -327,7 +327,7 @@ public class ScriptOperators {
|
||||
* Set the {@code accumulate} function that updates the state for each document. The functions first argument is
|
||||
* the current {@code state}, additional arguments can be defined via
|
||||
* {@link AccumulatorAccumulateArgsBuilder#accumulateArgs(Object...)}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state, accumArg1, accumArg2, ...) {
|
||||
* ...
|
||||
@@ -370,7 +370,7 @@ public class ScriptOperators {
|
||||
* Set the {@code merge} function used to merge two internal states. <br />
|
||||
* This might be required because the operation is run on a sharded cluster or when the operator exceeds its
|
||||
* memory limit.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state1, state2) {
|
||||
* ...
|
||||
@@ -389,7 +389,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Set the {@code finalize} function used to update the result of the accumulation when all documents have been
|
||||
* processed.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state) {
|
||||
* ...
|
||||
@@ -425,7 +425,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Define the {@code init} function for the {@link Accumulator accumulators} initial state. The function receives
|
||||
* its arguments from the {@link #initArgs(Object...)} array expression.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(initArg1, initArg2, ...) {
|
||||
* ...
|
||||
@@ -461,7 +461,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Set the {@code accumulate} function that updates the state for each document. The functions first argument is
|
||||
* the current {@code state}, additional arguments can be defined via {@link #accumulateArgs(Object...)}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state, accumArg1, accumArg2, ...) {
|
||||
* ...
|
||||
@@ -500,7 +500,7 @@ public class ScriptOperators {
|
||||
* Set the {@code merge} function used to merge two internal states. <br />
|
||||
* This might be required because the operation is run on a sharded cluster or when the operator exceeds its
|
||||
* memory limit.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state1, state2) {
|
||||
* ...
|
||||
@@ -537,7 +537,7 @@ public class ScriptOperators {
|
||||
/**
|
||||
* Set the {@code finalize} function used to update the result of the accumulation when all documents have been
|
||||
* processed.
|
||||
* <br />
|
||||
* <p />
|
||||
* <code class="java">
|
||||
* function(state) {
|
||||
* ...
|
||||
|
||||
@@ -193,6 +193,5 @@ public class SetOperation extends DocumentEnhancingOperation {
|
||||
*/
|
||||
SetOperation withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,873 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.domain.Sort;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Encapsulates the {@code setWindowFields}-operation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/">https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/</a>
|
||||
*/
|
||||
public class SetWindowFieldsOperation
|
||||
implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation {
|
||||
|
||||
private static final String CURRENT = "current";
|
||||
private static final String UNBOUNDED = "unbounded";
|
||||
|
||||
private final @Nullable Object partitionBy;
|
||||
private final @Nullable AggregationOperation sortBy;
|
||||
private final WindowOutput output;
|
||||
|
||||
/**
|
||||
* Create a new {@link SetWindowFieldsOperation} with given args.
|
||||
*
|
||||
* @param partitionBy The field or {@link AggregationExpression} to group by.
|
||||
* @param sortBy the {@link SortOperation operation} to sort the documents by in the partition.
|
||||
* @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective
|
||||
* values.
|
||||
*/
|
||||
protected SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy,
|
||||
WindowOutput output) {
|
||||
|
||||
this.partitionBy = partitionBy;
|
||||
this.sortBy = sortBy;
|
||||
this.output = output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}.
|
||||
*
|
||||
* @return new instance of {@link SetWindowFieldsOperationBuilder}.
|
||||
*/
|
||||
public static SetWindowFieldsOperationBuilder builder() {
|
||||
return new SetWindowFieldsOperationBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExposedFields getFields() {
|
||||
return ExposedFields.nonSynthetic(Fields.from(output.fields.toArray(new Field[0])));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext context) {
|
||||
|
||||
Document $setWindowFields = new Document();
|
||||
if (partitionBy != null) {
|
||||
if (partitionBy instanceof AggregationExpression) {
|
||||
$setWindowFields.append("partitionBy", ((AggregationExpression) partitionBy).toDocument(context));
|
||||
} else if (partitionBy instanceof Field) {
|
||||
$setWindowFields.append("partitionBy", context.getReference((Field) partitionBy).toString());
|
||||
} else {
|
||||
$setWindowFields.append("partitionBy", partitionBy);
|
||||
}
|
||||
}
|
||||
|
||||
if (sortBy != null) {
|
||||
$setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator()));
|
||||
}
|
||||
|
||||
Document output = new Document();
|
||||
for (ComputedField field : this.output.fields) {
|
||||
|
||||
Document fieldOperation = field.getWindowOperator().toDocument(context);
|
||||
if (field.window != null) {
|
||||
fieldOperation.put("window", field.window.toDocument(context));
|
||||
}
|
||||
output.append(field.getName(), fieldOperation);
|
||||
}
|
||||
$setWindowFields.append("output", output);
|
||||
|
||||
return new Document(getOperator(), $setWindowFields);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator()
|
||||
*/
|
||||
@Override
|
||||
public String getOperator() {
|
||||
return "$setWindowFields";
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField
|
||||
* field(s)} to append to the documents in the output.
|
||||
*/
|
||||
public static class WindowOutput {
|
||||
|
||||
private final List<ComputedField> fields;
|
||||
|
||||
/**
|
||||
* Create a new output containing the single given {@link ComputedField field}.
|
||||
*
|
||||
* @param outputField must not be {@literal null}.
|
||||
*/
|
||||
public WindowOutput(ComputedField outputField) {
|
||||
|
||||
Assert.notNull(outputField, "OutputField must not be null!");
|
||||
|
||||
this.fields = new ArrayList<>();
|
||||
this.fields.add(outputField);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the given {@link ComputedField field} to the outptut.
|
||||
*
|
||||
* @param field must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public WindowOutput append(ComputedField field) {
|
||||
|
||||
Assert.notNull(field, "Field must not be null!");
|
||||
|
||||
fields.add(field);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ComputedFieldAppender}.
|
||||
* @see #append(ComputedField)
|
||||
*/
|
||||
public ComputedFieldAppender append(AggregationExpression expression) {
|
||||
|
||||
return new ComputedFieldAppender() {
|
||||
|
||||
@Nullable private Window window;
|
||||
|
||||
@Override
|
||||
public WindowOutput as(String fieldname) {
|
||||
|
||||
return WindowOutput.this.append(new ComputedField(fieldname, expression, window));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ComputedFieldAppender within(Window window) {
|
||||
this.window = window;
|
||||
return this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}.
|
||||
*/
|
||||
interface ComputedFieldAppender {
|
||||
|
||||
/**
|
||||
* Specify the target field name.
|
||||
*
|
||||
* @param fieldname the name of field to add to the target document.
|
||||
* @return the {@link WindowOutput} that started the append operation.
|
||||
*/
|
||||
WindowOutput as(String fieldname);
|
||||
|
||||
/**
|
||||
* Specify the window boundaries.
|
||||
*
|
||||
* @param window must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
ComputedFieldAppender within(Window window);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Field} that the result of a computation done via an {@link AggregationExpression}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class ComputedField implements Field {
|
||||
|
||||
private final String name;
|
||||
private final AggregationExpression windowOperator;
|
||||
private final @Nullable Window window;
|
||||
|
||||
/**
|
||||
* Create a new {@link ComputedField}.
|
||||
*
|
||||
* @param name the target field name.
|
||||
* @param windowOperator the expression to calculate the field value.
|
||||
*/
|
||||
public ComputedField(String name, AggregationExpression windowOperator) {
|
||||
this(name, windowOperator, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ComputedField}.
|
||||
*
|
||||
* @param name the target field name.
|
||||
* @param windowOperator the expression to calculate the field value.
|
||||
* @param window the boundaries to operate within. Can be {@literal null}.
|
||||
*/
|
||||
public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) {
|
||||
|
||||
this.name = name;
|
||||
this.windowOperator = windowOperator;
|
||||
this.window = window;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTarget() {
|
||||
return getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAliased() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public AggregationExpression getWindowOperator() {
|
||||
return windowOperator;
|
||||
}
|
||||
|
||||
public Window getWindow() {
|
||||
return window;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick access to {@link DocumentWindow documents} and {@literal RangeWindow range} {@link Window windows}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface Windows {
|
||||
|
||||
/**
|
||||
* Create a document window relative to the position of the current document.
|
||||
*
|
||||
* @param lower an integer for a position relative to the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @param upper an integer for a position relative to the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @return new instance of {@link DocumentWindow}.
|
||||
*/
|
||||
static DocumentWindow documents(Object lower, Object upper) {
|
||||
return new DocumentWindow(lower, upper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a range window defined based on sort expression.
|
||||
*
|
||||
* @param lower a numeric value to add the sort by field value of the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @param upper a numeric value to add the sort by field value of the current document, {@literal current} or
|
||||
* {@literal unbounded}.
|
||||
* @return new instance of {@link RangeWindow}.
|
||||
*/
|
||||
static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) {
|
||||
return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a range window based on the {@link Sort sort value} of the current document via a fluent API.
|
||||
*
|
||||
* @return new instance of {@link RangeWindowBuilder}.
|
||||
*/
|
||||
static RangeWindowBuilder range() {
|
||||
return new RangeWindowBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a document window relative to the position of the current document via a fluent API.
|
||||
*
|
||||
* @return new instance of {@link DocumentWindowBuilder}.
|
||||
*/
|
||||
static DocumentWindowBuilder documents() {
|
||||
return new DocumentWindowBuilder();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}.
|
||||
*/
|
||||
public interface Window {
|
||||
|
||||
/**
|
||||
* The lower (inclusive) boundary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Object getLower();
|
||||
|
||||
/**
|
||||
* The upper (inclusive) boundary.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
Object getUpper();
|
||||
|
||||
/**
|
||||
* Obtain the document representation of the window in a default {@link AggregationOperationContext context}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
default Document toDocument() {
|
||||
return toDocument(Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the document representation of the window in the given {@link AggregationOperationContext context}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Document toDocument(AggregationOperationContext ctx);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API for a {@link RangeWindow}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RangeWindowBuilder {
|
||||
|
||||
private @Nullable Object lower;
|
||||
private @Nullable Object upper;
|
||||
private @Nullable WindowUnit unit;
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on the sortBy field.
|
||||
*
|
||||
* @param lower eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder from(String lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit based on the sortBy field.
|
||||
*
|
||||
* @param upper eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder to(String upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for
|
||||
* a position before the current document. Use a positive integer for a position after the current document.
|
||||
* {@code 0} is the current document position.
|
||||
*
|
||||
* @param lower
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder from(Number lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for
|
||||
* a position before the current document. Use a positive integer for a position after the current document.
|
||||
* {@code 0} is the current document position.
|
||||
*
|
||||
* @param upper
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder to(Number upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal current} as {@link #from(String) lower} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder fromCurrent() {
|
||||
return from(CURRENT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal unbounded} as {@link #from(String) lower} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder fromUnbounded() {
|
||||
return from(UNBOUNDED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal current} as {@link #to(String) upper} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder toCurrent() {
|
||||
return to(CURRENT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use {@literal unbounded} as {@link #to(String) upper} limit.
|
||||
*
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder toUnbounded() {
|
||||
return to(UNBOUNDED);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link WindowUnit unit} or measure for the given {@link Window}.
|
||||
*
|
||||
* @param windowUnit must not be {@literal null}. Can be on of {@link Windows}.
|
||||
* @return this.
|
||||
*/
|
||||
public RangeWindowBuilder unit(WindowUnit windowUnit) {
|
||||
|
||||
Assert.notNull(windowUnit, "WindowUnit must not be null");
|
||||
this.unit = windowUnit;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the {@link RangeWindow}.
|
||||
*
|
||||
* @return new instance of {@link RangeWindow}.
|
||||
*/
|
||||
public RangeWindow build() {
|
||||
|
||||
Assert.notNull(lower, "Lower bound must not be null");
|
||||
Assert.notNull(upper, "Upper bound must not be null");
|
||||
Assert.notNull(unit, "WindowUnit bound must not be null");
|
||||
|
||||
return new RangeWindow(lower, upper, unit);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder API for a {@link RangeWindow}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentWindowBuilder {
|
||||
|
||||
private @Nullable Object lower;
|
||||
private @Nullable Object upper;
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on current document. Use a negative integer for a position before the
|
||||
* current document. Use a positive integer for a position after the current document. {@code 0} is the current
|
||||
* document position.
|
||||
*
|
||||
* @param lower
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder from(Number lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder fromCurrent() {
|
||||
return from(CURRENT);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder fromUnbounded() {
|
||||
return from(UNBOUNDED);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder to(String upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The lower (inclusive) range limit based on current document.
|
||||
*
|
||||
* @param lower eg. {@literal current} or {@literal unbounded}.
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder from(String lower) {
|
||||
|
||||
this.lower = lower;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The upper (inclusive) range limit based on current document. Use a negative integer for a position before the
|
||||
* current document. Use a positive integer for a position after the current document. {@code 0} is the current
|
||||
* document position.
|
||||
*
|
||||
* @param upper
|
||||
* @return this.
|
||||
*/
|
||||
public DocumentWindowBuilder to(Number upper) {
|
||||
|
||||
this.upper = upper;
|
||||
return this;
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder toCurrent() {
|
||||
return to(CURRENT);
|
||||
}
|
||||
|
||||
public DocumentWindowBuilder toUnbounded() {
|
||||
return to(UNBOUNDED);
|
||||
}
|
||||
|
||||
public DocumentWindow build() {
|
||||
|
||||
Assert.notNull(lower, "Lower bound must not be null");
|
||||
Assert.notNull(upper, "Upper bound must not be null");
|
||||
|
||||
return new DocumentWindow(lower, upper);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Common base class for {@link Window} implementation.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static abstract class WindowImpl implements Window {
|
||||
|
||||
private final Object lower;
|
||||
private final Object upper;
|
||||
|
||||
protected WindowImpl(Object lower, Object upper) {
|
||||
this.lower = lower;
|
||||
this.upper = upper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getLower() {
|
||||
return lower;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getUpper() {
|
||||
return upper;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Window} implementation based on the current document.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class DocumentWindow extends WindowImpl {
|
||||
|
||||
DocumentWindow(Object lower, Object upper) {
|
||||
super(lower, upper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext ctx) {
|
||||
return new Document("documents", Arrays.asList(getLower(), getUpper()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link Window} implementation based on the sort fields.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class RangeWindow extends WindowImpl {
|
||||
|
||||
private final WindowUnit unit;
|
||||
|
||||
protected RangeWindow(Object lower, Object upper, WindowUnit unit) {
|
||||
|
||||
super(lower, upper);
|
||||
this.unit = unit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document toDocument(AggregationOperationContext ctx) {
|
||||
|
||||
Document range = new Document("range", new Object[] { getLower(), getUpper() });
|
||||
if (unit != null && !WindowUnits.DEFAULT.equals(unit)) {
|
||||
range.append("unit", unit.name().toLowerCase());
|
||||
}
|
||||
return range;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The actual time unit to apply to a {@link Window}.
|
||||
*/
|
||||
public interface WindowUnit {
|
||||
|
||||
String name();
|
||||
|
||||
/**
|
||||
* Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and
|
||||
* milliseconds.
|
||||
*
|
||||
* @param timeUnit the time unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static WindowUnit from(TimeUnit timeUnit) {
|
||||
|
||||
Assert.notNull(timeUnit, "TimeUnit must not be null");
|
||||
|
||||
switch (timeUnit) {
|
||||
case DAYS:
|
||||
return WindowUnits.DAY;
|
||||
case HOURS:
|
||||
return WindowUnits.HOUR;
|
||||
case MINUTES:
|
||||
return WindowUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return WindowUnits.SECOND;
|
||||
case MILLISECONDS:
|
||||
return WindowUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the given chrono unit into a {@link WindowUnit}. Supported units are: years, weeks, months, days, hours,
|
||||
* minutes, seconds, and millis.
|
||||
*
|
||||
* @param chronoUnit the chrono unit to convert, must not be {@literal null}.
|
||||
* @return
|
||||
* @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion.
|
||||
*/
|
||||
static WindowUnit from(ChronoUnit chronoUnit) {
|
||||
|
||||
switch (chronoUnit) {
|
||||
case YEARS:
|
||||
return WindowUnits.YEAR;
|
||||
case WEEKS:
|
||||
return WindowUnits.WEEK;
|
||||
case MONTHS:
|
||||
return WindowUnits.MONTH;
|
||||
case DAYS:
|
||||
return WindowUnits.DAY;
|
||||
case HOURS:
|
||||
return WindowUnits.HOUR;
|
||||
case MINUTES:
|
||||
return WindowUnits.MINUTE;
|
||||
case SECONDS:
|
||||
return WindowUnits.SECOND;
|
||||
case MILLIS:
|
||||
return WindowUnits.MILLISECOND;
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick access to available {@link WindowUnit units}.
|
||||
*/
|
||||
public enum WindowUnits implements WindowUnit {
|
||||
DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND
|
||||
}
|
||||
|
||||
/**
|
||||
* A fluent builder to create a {@link SetWindowFieldsOperation}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public static class SetWindowFieldsOperationBuilder {
|
||||
|
||||
private Object partitionBy;
|
||||
private SortOperation sortOperation;
|
||||
private WindowOutput output;
|
||||
|
||||
/**
|
||||
* Specify the field to group by.
|
||||
*
|
||||
* @param fieldName must not be {@literal null} or null.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionByField(String fieldName) {
|
||||
|
||||
Assert.hasText(fieldName, "Field name must not be empty or null");
|
||||
return partitionBy(Fields.field("$" + fieldName, fieldName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression expression} to group by.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) {
|
||||
return partitionBy(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort {@link Sort.Direction#ASC ascending} by the given fields.
|
||||
*
|
||||
* @param fields must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(String... fields) {
|
||||
return sortBy(Sort.by(fields));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the sort order.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(Sort sort) {
|
||||
return sortBy(new SortOperation(sort));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link SortOperation} to use.
|
||||
*
|
||||
* @param sort must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) {
|
||||
|
||||
Assert.notNull(sort, "SortOperation must not be null");
|
||||
|
||||
this.sortOperation = sort;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the actual output computation.
|
||||
*
|
||||
* @param output must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder output(WindowOutput output) {
|
||||
|
||||
Assert.notNull(output, "WindowOutput must not be null");
|
||||
|
||||
this.output = output;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a field capturing the result of the given {@link AggregationExpression expression} to the output.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link WindowChoice}.
|
||||
*/
|
||||
public WindowChoice output(AggregationExpression expression) {
|
||||
|
||||
return new WindowChoice() {
|
||||
|
||||
@Nullable private Window window;
|
||||
|
||||
@Override
|
||||
public As within(Window window) {
|
||||
|
||||
Assert.notNull(window, "Window must not be null");
|
||||
|
||||
this.window = window;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SetWindowFieldsOperationBuilder as(String targetFieldName) {
|
||||
|
||||
Assert.hasText(targetFieldName, "Target field name must not be empty or null");
|
||||
|
||||
ComputedField computedField = new ComputedField(targetFieldName, expression, window);
|
||||
|
||||
if (SetWindowFieldsOperationBuilder.this.output == null) {
|
||||
SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField);
|
||||
} else {
|
||||
SetWindowFieldsOperationBuilder.this.output.append(computedField);
|
||||
}
|
||||
|
||||
return SetWindowFieldsOperationBuilder.this;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface to capture field name used to capture the computation result.
|
||||
*/
|
||||
public interface As {
|
||||
|
||||
/**
|
||||
* Define the target name field name to hold the computation result.
|
||||
*
|
||||
* @param targetFieldName must not be {@literal null} or empty.
|
||||
* @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance.
|
||||
*/
|
||||
SetWindowFieldsOperationBuilder as(String targetFieldName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface to capture an optional {@link Window} applicable to the field computation.
|
||||
*/
|
||||
public interface WindowChoice extends As {
|
||||
|
||||
/**
|
||||
* Specify calculation boundaries.
|
||||
*
|
||||
* @param window must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
As within(Window window);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Partition by a value that translates to a valid mongodb expression.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return this.
|
||||
*/
|
||||
public SetWindowFieldsOperationBuilder partitionBy(Object value) {
|
||||
|
||||
Assert.notNull(value, "Partition By must not be null");
|
||||
|
||||
partitionBy = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments.
|
||||
*
|
||||
* @return new instance of {@link SetWindowFieldsOperation}.
|
||||
*/
|
||||
public SetWindowFieldsOperation build() {
|
||||
return new SetWindowFieldsOperation(partitionBy, sortOperation, output);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -21,16 +21,17 @@ import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* Encapsulates the aggregation framework {@code $sortByCount}-operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* {@code $sortByCount} stage is typically used with {@link Aggregation} and {@code $facet}. Groups incoming documents
|
||||
* based on the value of a specified expression and computes the count of documents in each distinct group.
|
||||
* {@link SortByCountOperation} is equivalent to {@code { $group: { _id: <expression>, count: { $sum: 1 } } }, { $sort:
|
||||
* { count: -1 } }}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* We recommend to use the static factory method {@link Aggregation#sortByCount(String)} instead of creating instances
|
||||
* of this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/">https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/">https://docs.mongodb.com/manual/reference/operator/aggregation/sortByCount/</a>
|
||||
* @author Jérôme Guyon
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
|
||||
@@ -102,7 +102,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG);
|
||||
ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state);
|
||||
|
||||
return transform(new AggregationExpressionTransformationContext<>(node, null, null, context));
|
||||
return transform(new AggregationExpressionTransformationContext<ExpressionNode>(node, null, null, context));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -500,10 +500,7 @@ class SpelExpressionTransformer implements AggregationExpressionTransformer {
|
||||
dbo.put(methodReference.getArgumentMap()[i++], transform(child, context));
|
||||
}
|
||||
args = dbo;
|
||||
} else if (ObjectUtils.nullSafeEquals(methodReference.getArgumentType(), ArgumentType.EMPTY_DOCUMENT)) {
|
||||
args = new Document();
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
|
||||
List<Object> argList = new ArrayList<Object>();
|
||||
|
||||
|
||||
@@ -18,11 +18,8 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.springframework.data.domain.Range;
|
||||
import org.springframework.data.mongodb.util.RegexFlags;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
@@ -30,7 +27,6 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Divya Srivastava
|
||||
* @since 1.10
|
||||
*/
|
||||
public class StringOperators {
|
||||
@@ -520,173 +516,6 @@ public class StringOperators {
|
||||
return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find the document with the first match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(String regex) {
|
||||
return createRegexFind().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find the document with the first
|
||||
* match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(AggregationExpression expression) {
|
||||
return createRegexFind().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find the document with the first match.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(Pattern pattern) {
|
||||
return createRegexFind().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find the document with the first match.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFind regexFind(String regex, String options) {
|
||||
return createRegexFind().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexFind createRegexFind() {
|
||||
return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find all the documents with the match.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(String regex) {
|
||||
return createRegexFindAll().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find all the documents with the
|
||||
* match..<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(AggregationExpression expression) {
|
||||
return createRegexFindAll().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find all the documents with the match.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(Pattern pattern) {
|
||||
return createRegexFindAll().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find all the documents with the match.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexFindAll regexFindAll(String regex, String options) {
|
||||
return createRegexFindAll().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexFindAll createRegexFindAll() {
|
||||
return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the given
|
||||
* regular expression to find if a match is found or not.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(String regex) {
|
||||
return createRegexMatch().regex(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression resulting from the given {@link AggregationExpression} to find if a match is found or not.<br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(AggregationExpression expression) {
|
||||
return createRegexMatch().regexOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with
|
||||
* the options specified in the argument to find if a match is found or not.
|
||||
*
|
||||
* @param pattern the pattern object to apply.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(Pattern pattern) {
|
||||
return createRegexMatch().pattern(pattern);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular
|
||||
* expression with the options specified in the argument to find if a match is found or not.
|
||||
*
|
||||
* @param regex the regular expression to apply.
|
||||
* @param options the options to use.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public RegexMatch regexMatch(String regex, String options) {
|
||||
return createRegexMatch().regex(regex).options(options);
|
||||
}
|
||||
|
||||
private RegexMatch createRegexMatch() {
|
||||
return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression);
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
@@ -1648,434 +1477,4 @@ public class StringOperators {
|
||||
return "$rtrim";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and
|
||||
* returns information on the first matched substring. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexFind extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexFind(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public static RegexFind valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public static RegexFind valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexFind(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexFind(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexFind(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFind}.
|
||||
*/
|
||||
public RegexFind regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFind(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexFind";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and
|
||||
* returns information on all the matched substrings. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexFindAll extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexFindAll(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public static RegexFindAll valueOf(String fieldReference) {
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as
|
||||
* {@literal input} value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public static RegexFindAll valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexFindAll(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "fieldReference must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexFindAll}.
|
||||
*/
|
||||
public RegexFindAll regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexFindAll(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexFindAll";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and
|
||||
* returns a boolean that indicates if a match is found or not. <br />
|
||||
* <strong>NOTE:</strong> Requires MongoDB 4.0 or later.
|
||||
*
|
||||
* @author Divya Srivastava
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class RegexMatch extends AbstractAggregationExpression {
|
||||
|
||||
protected RegexMatch(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public static RegexMatch valueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input}
|
||||
* value.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public static RegexMatch valueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the options to use with the regular expression.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch options(String options) {
|
||||
|
||||
Assert.notNull(options, "Options must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", options));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the reference to the {@link Field field} holding the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch optionsOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular
|
||||
* expression.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch optionsOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(append("options", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a {@link Pattern} into {@code regex} and {@code options} fields.
|
||||
*
|
||||
* @param pattern must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch pattern(Pattern pattern) {
|
||||
|
||||
Assert.notNull(pattern, "Pattern must not be null!");
|
||||
|
||||
Map<String, Object> regex = append("regex", pattern.pattern());
|
||||
regex.put("options", RegexFlags.toRegexOptions(pattern.flags()));
|
||||
|
||||
return new RegexMatch(regex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the regular expression to apply.
|
||||
*
|
||||
* @param regex must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regex(String regex) {
|
||||
|
||||
Assert.notNull(regex, "Regex must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", regex));
|
||||
}
|
||||
|
||||
/**
|
||||
* Specify the reference to the {@link Field field} holding the regular expression to apply.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regexOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link RegexMatch}.
|
||||
*/
|
||||
public RegexMatch regexOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
|
||||
return new RegexMatch(append("regex", expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$regexMatch";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference;
|
||||
@@ -122,13 +122,13 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
return AggregationOperationContext.super.getFields(type);
|
||||
}
|
||||
|
||||
List<Field> fields = new ArrayList<>();
|
||||
List<String> fields = new ArrayList<>();
|
||||
|
||||
for (MongoPersistentProperty property : entity) {
|
||||
fields.add(Fields.field(property.getName(), property.getFieldName()));
|
||||
fields.add(property.getName());
|
||||
}
|
||||
|
||||
return Fields.from(fields.toArray(new Field[0]));
|
||||
return Fields.fields(fields.toArray(new String[0]));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -142,13 +142,12 @@ public class TypeBasedAggregationOperationContext implements AggregationOperatio
|
||||
|
||||
/**
|
||||
* This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for
|
||||
* its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that
|
||||
* its existence. Typically the {@link AggregationOperationContext} fails when referencing unknown fields, those that
|
||||
* are not present in one of the previous stages or the input source, throughout the pipeline.
|
||||
*
|
||||
* @param type The domain type to map fields to.
|
||||
* @return a more relaxed {@link AggregationOperationContext}.
|
||||
* @since 3.1
|
||||
* @see RelaxedTypeBasedAggregationOperationContext
|
||||
*/
|
||||
public AggregationOperationContext continueOnMissingFieldReference(Class<?> type) {
|
||||
return new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, mapper);
|
||||
|
||||
@@ -28,7 +28,7 @@ import org.springframework.util.Assert;
|
||||
* containing duplicates, into a single result set that is handed over to the next stage. <br />
|
||||
* In order to remove duplicates it is possible to append a {@link GroupOperation} right after
|
||||
* {@link UnionWithOperation}.
|
||||
* <br />
|
||||
* <p />
|
||||
* If the {@link UnionWithOperation} uses a
|
||||
* <a href="https://docs.mongodb.com/master/reference/operator/aggregation/unionWith/#unionwith-pipeline">pipeline</a>
|
||||
* to process documents, field names within the pipeline will be treated as is. In order to map domain type property
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.DBRef;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public interface DbRefResolver extends ReferenceResolver {
|
||||
public interface DbRefResolver {
|
||||
|
||||
/**
|
||||
* Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. The method
|
||||
|
||||
@@ -15,6 +15,13 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
@@ -22,18 +29,28 @@ import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
@@ -50,11 +67,13 @@ import com.mongodb.client.model.Filters;
|
||||
* @author Mark Paluch
|
||||
* @since 1.4
|
||||
*/
|
||||
public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver {
|
||||
public class DefaultDbRefResolver implements DbRefResolver {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(DefaultDbRefResolver.class);
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
/**
|
||||
* Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}.
|
||||
@@ -63,11 +82,11 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
*/
|
||||
public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator());
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
this.exceptionTranslator = mongoDbFactory.getExceptionTranslator();
|
||||
this.objenesis = new ObjenesisStd(true);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -95,8 +114,17 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
*/
|
||||
@Override
|
||||
public Document fetch(DBRef dbRef) {
|
||||
return getReferenceLoader().fetchOne(DocumentReferenceQuery.forSingleDocument(Filters.eq("_id", dbRef.getId())),
|
||||
ReferenceCollection.fromDBRef(dbRef));
|
||||
|
||||
MongoCollection<Document> mongoCollection = getCollection(dbRef);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(),
|
||||
StringUtils.hasText(dbRef.getDatabaseName()) ? dbRef.getDatabaseName()
|
||||
: mongoCollection.getNamespace().getDatabaseName(),
|
||||
dbRef.getCollectionName());
|
||||
}
|
||||
|
||||
return mongoCollection.find(Filters.eq("_id", dbRef.getId())).first();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -130,14 +158,14 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
MongoCollection<Document> mongoCollection = getCollection(databaseSource);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace(String.format("Bulk fetching DBRefs %s from %s.%s.", ids,
|
||||
LOGGER.trace("Bulk fetching DBRefs {} from {}.{}.", ids,
|
||||
StringUtils.hasText(databaseSource.getDatabaseName()) ? databaseSource.getDatabaseName()
|
||||
: mongoCollection.getNamespace().getDatabaseName(),
|
||||
databaseSource.getCollectionName()));
|
||||
databaseSource.getCollectionName());
|
||||
}
|
||||
|
||||
List<Document> result = mongoCollection //
|
||||
.find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) //
|
||||
.find(new Document("_id", new Document("$in", ids))) //
|
||||
.into(new ArrayList<>());
|
||||
|
||||
return ids.stream() //
|
||||
@@ -157,9 +185,44 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
DbRefResolverCallback callback, DbRefProxyHandler handler) {
|
||||
|
||||
Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref);
|
||||
Class<?> propertyType = property.getType();
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback);
|
||||
|
||||
return handler.populateId(property, dbref, lazyLoadingProxy);
|
||||
if (!propertyType.isInterface()) {
|
||||
|
||||
Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
|
||||
return handler.populateId(property, dbref, factory);
|
||||
}
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
proxyFactory.addInterface(LazyLoadingProxy.class);
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
|
||||
return handler.populateId(property, dbref, proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CGLib enhanced type for the given source type.
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getEnhancedTypeFor(Class<?> type) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
return enhancer.createClass();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -182,10 +245,253 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
private static Stream<Document> documentWithId(Object identifier, Collection<Document> documents) {
|
||||
|
||||
return documents.stream() //
|
||||
.filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) //
|
||||
.filter(it -> it.get("_id").equals(identifier)) //
|
||||
.limit(1);
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link MethodInterceptor} that is used within a lazy loading proxy. The property resolving is delegated to a
|
||||
* {@link DbRefResolverCallback}. The resolving process is triggered by a method invocation on the proxy and is
|
||||
* guaranteed to be performed only once.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
static class LazyLoadingInterceptor
|
||||
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD;
|
||||
|
||||
private final DbRefResolverCallback callback;
|
||||
private final MongoPersistentProperty property;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
private volatile boolean resolved;
|
||||
private final @Nullable DBRef dbref;
|
||||
private @Nullable Object result;
|
||||
|
||||
static {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link LazyLoadingInterceptor} for the given {@link MongoPersistentProperty},
|
||||
* {@link PersistenceExceptionTranslator} and {@link DbRefResolverCallback}.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param dbref can be {@literal null}.
|
||||
* @param callback must not be {@literal null}.
|
||||
*/
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, @Nullable DBRef dbref,
|
||||
PersistenceExceptionTranslator exceptionTranslator, DbRefResolverCallback callback) {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "Exception translator must not be null!");
|
||||
Assert.notNull(callback, "Callback must not be null!");
|
||||
|
||||
this.dbref = dbref;
|
||||
this.callback = callback;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.aopalliance.intercept.MethodInterceptor#invoke(org.aopalliance.intercept.MethodInvocation)
|
||||
*/
|
||||
@Override
|
||||
public Object invoke(@Nullable MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.cglib.proxy.MethodInterceptor#intercept(java.lang.Object, java.lang.reflect.Method, java.lang.Object[], org.springframework.cglib.proxy.MethodProxy)
|
||||
*/
|
||||
@Nullable
|
||||
@Override
|
||||
public Object intercept(Object obj, Method method, Object[] args, @Nullable MethodProxy proxy) throws Throwable {
|
||||
|
||||
if (INITIALIZE_METHOD.equals(method)) {
|
||||
return ensureResolved();
|
||||
}
|
||||
|
||||
if (TO_DBREF_METHOD.equals(method)) {
|
||||
return this.dbref;
|
||||
}
|
||||
|
||||
if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {
|
||||
|
||||
if (ReflectionUtils.isToStringMethod(method)) {
|
||||
return proxyToString(proxy);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isEqualsMethod(method)) {
|
||||
return proxyEquals(proxy, args[0]);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode(proxy);
|
||||
}
|
||||
|
||||
// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
|
||||
if (FINALIZE_METHOD.equals(method)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
if (target == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
|
||||
return method.invoke(target, args);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a to string representation for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private String proxyToString(@Nullable Object proxy) {
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (dbref != null) {
|
||||
description.append(dbref.getCollectionName());
|
||||
description.append(":");
|
||||
description.append(dbref.getId());
|
||||
} else {
|
||||
description.append(System.identityHashCode(proxy));
|
||||
}
|
||||
description.append("$").append(LazyLoadingProxy.class.getSimpleName());
|
||||
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the hashcode for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @return
|
||||
*/
|
||||
private int proxyHashCode(@Nullable Object proxy) {
|
||||
return proxyToString(proxy).hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs an equality check for the given {@code proxy}.
|
||||
*
|
||||
* @param proxy
|
||||
* @param that
|
||||
* @return
|
||||
*/
|
||||
private boolean proxyEquals(@Nullable Object proxy, Object that) {
|
||||
|
||||
if (!(that instanceof LazyLoadingProxy)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (that == proxy) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return proxyToString(proxy).equals(that.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Will trigger the resolution if the proxy is not resolved already or return a previously resolved result.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for serialization.
|
||||
*
|
||||
* @param out
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for deserialization.
|
||||
*
|
||||
* @param in
|
||||
* @throws IOException
|
||||
*/
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true;
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the proxy into its backing object.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (resolved) {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Accessing already resolved lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace("Resolving lazy loading property {}.{}",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName());
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
|
||||
if (translatedException instanceof ClientSessionException) {
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
|
||||
}
|
||||
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
|
||||
translatedException != null ? translatedException : ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Customization hook for obtaining the {@link MongoCollection} for a given {@link DBRef}.
|
||||
*
|
||||
@@ -198,10 +504,4 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db
|
||||
return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory)
|
||||
.getCollection(dbref.getCollectionName(), Document.class);
|
||||
}
|
||||
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,115 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.DBRef;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity
|
||||
* associations expressed via a {@link MongoPersistentProperty persitent property}. Creates {@link LazyLoadingProxy
|
||||
* proxies} for associations that should be lazily loaded.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Anton Buzdalkin
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DefaultReferenceResolver implements ReferenceResolver {
|
||||
|
||||
private final ReferenceLoader referenceLoader;
|
||||
private final LazyLoadingProxyFactory proxyFactory;
|
||||
|
||||
private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx);
|
||||
private final LookupFunction singleValueLookupFunction = (filter, ctx) -> {
|
||||
Document target = getReferenceLoader().fetchOne(filter, ctx);
|
||||
return target == null ? Collections.emptyList() : Collections.singleton(target);
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DefaultReferenceResolver}.
|
||||
*
|
||||
* @param referenceLoader must not be {@literal null}.
|
||||
* @param exceptionTranslator must not be {@literal null}.
|
||||
*/
|
||||
public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
Assert.notNull(referenceLoader, "ReferenceLoader must not be null!");
|
||||
Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null!");
|
||||
|
||||
this.referenceLoader = referenceLoader;
|
||||
this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
|
||||
LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction
|
||||
: singleValueLookupFunction;
|
||||
|
||||
if (isLazyReference(property)) {
|
||||
return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily.
|
||||
*
|
||||
* @param property
|
||||
* @return return {@literal true} if the defined association is lazy.
|
||||
* @see DBRef#lazy()
|
||||
* @see DocumentReference#lazy()
|
||||
*/
|
||||
protected boolean isLazyReference(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return property.getDocumentReference().lazy();
|
||||
}
|
||||
|
||||
return property.getDBRef() != null && property.getDBRef().lazy();
|
||||
}
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} executing the lookup.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
protected ReferenceLoader getReferenceLoader() {
|
||||
return referenceLoader;
|
||||
}
|
||||
|
||||
LazyLoadingProxyFactory getProxyFactory() {
|
||||
return proxyFactory;
|
||||
}
|
||||
|
||||
private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) {
|
||||
return proxyFactory.createLazyLoadingProxy(property, it -> {
|
||||
return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader);
|
||||
}, source instanceof DocumentReferenceSource ? ((DocumentReferenceSource)source).getTargetSource() : source);
|
||||
}
|
||||
}
|
||||
@@ -91,7 +91,7 @@ class DocumentAccessor {
|
||||
public void put(MongoPersistentProperty prop, @Nullable Object value) {
|
||||
|
||||
Assert.notNull(prop, "MongoPersistentProperty must not be null!");
|
||||
String fieldName = getFieldName(prop);
|
||||
String fieldName = prop.getFieldName();
|
||||
|
||||
if (!fieldName.contains(".")) {
|
||||
BsonUtils.addToMap(document, fieldName, value);
|
||||
@@ -123,7 +123,7 @@ class DocumentAccessor {
|
||||
*/
|
||||
@Nullable
|
||||
public Object get(MongoPersistentProperty property) {
|
||||
return BsonUtils.resolveValue(document, getFieldName(property));
|
||||
return BsonUtils.resolveValue(document, property.getFieldName());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -150,11 +150,7 @@ class DocumentAccessor {
|
||||
|
||||
Assert.notNull(property, "Property must not be null!");
|
||||
|
||||
return BsonUtils.hasValue(document, getFieldName(property));
|
||||
}
|
||||
|
||||
String getFieldName(MongoPersistentProperty prop) {
|
||||
return prop.getFieldName();
|
||||
return BsonUtils.hasValue(document, property.getFieldName());
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,259 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.WeakHashMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.types.ObjectId;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.dao.InvalidDataAccessApiUsageException;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
|
||||
/**
|
||||
* Internal API to construct {@link DocumentPointer} for a given property. Considers {@link LazyLoadingProxy},
|
||||
* registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter},
|
||||
* simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
class DocumentPointerFactory {
|
||||
|
||||
private final ConversionService conversionService;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Map<String, LinkageDocument> cache;
|
||||
|
||||
/**
|
||||
* A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of
|
||||
* <code>{'_id' : ?#{#target} }</code>.
|
||||
*/
|
||||
private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt)
|
||||
"['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id"
|
||||
"?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces
|
||||
"['\"]?\\?#\\{#target\\}['\"]?" + // leading to the potentially quoted ?#{#target} expression
|
||||
"\\s*}"); // some optional whitespaces and document close
|
||||
|
||||
DocumentPointerFactory(ConversionService conversionService,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
|
||||
this.conversionService = conversionService;
|
||||
this.mappingContext = mappingContext;
|
||||
this.cache = new WeakHashMap<>();
|
||||
}
|
||||
|
||||
DocumentPointer<?> computePointer(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
MongoPersistentProperty property, Object value, Class<?> typeHint) {
|
||||
|
||||
if (value instanceof LazyLoadingProxy) {
|
||||
return () -> ((LazyLoadingProxy) value).getSource();
|
||||
}
|
||||
|
||||
if (conversionService.canConvert(typeHint, DocumentPointer.class)) {
|
||||
return conversionService.convert(value, DocumentPointer.class);
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getRequiredPersistentEntity(property.getAssociationTargetType());
|
||||
|
||||
if (usesDefaultLookup(property)) {
|
||||
|
||||
MongoPersistentProperty idProperty = persistentEntity.getIdProperty();
|
||||
Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier();
|
||||
|
||||
if (idProperty.hasExplicitWriteTarget()
|
||||
&& conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) {
|
||||
return () -> conversionService.convert(idValue, idProperty.getFieldType());
|
||||
}
|
||||
|
||||
if (idValue instanceof String && ObjectId.isValid((String) idValue)) {
|
||||
return () -> new ObjectId((String) idValue);
|
||||
}
|
||||
|
||||
return () -> idValue;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> valueEntity = mappingContext.getPersistentEntity(value.getClass());
|
||||
PersistentPropertyAccessor<Object> propertyAccessor;
|
||||
if (valueEntity == null) {
|
||||
propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value);
|
||||
} else {
|
||||
propertyAccessor = valueEntity.getPropertyPathAccessor(value);
|
||||
}
|
||||
|
||||
return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from)
|
||||
.getDocumentPointer(mappingContext, persistentEntity, propertyAccessor);
|
||||
}
|
||||
|
||||
private boolean usesDefaultLookup(MongoPersistentProperty property) {
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches();
|
||||
}
|
||||
|
||||
Reference atReference = property.findAnnotation(Reference.class);
|
||||
if (atReference != null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
throw new IllegalStateException(String.format("%s does not seem to be define Reference", property));
|
||||
}
|
||||
|
||||
/**
|
||||
* Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and
|
||||
* inverting it.
|
||||
*
|
||||
* <pre class="code">
|
||||
* // source
|
||||
* { 'firstname' : ?#{fn}, 'lastname' : '?#{ln} }
|
||||
*
|
||||
* // target
|
||||
* { 'fn' : ..., 'ln' : ... }
|
||||
* </pre>
|
||||
*
|
||||
* The actual pointer is the computed via
|
||||
* {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from
|
||||
* the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions
|
||||
* from the source.
|
||||
*/
|
||||
static class LinkageDocument {
|
||||
|
||||
static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?<fieldName>[\\w\\d\\.\\-)]*)\\}");
|
||||
static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?<index>\\d*)_###");
|
||||
|
||||
private final String lookup;
|
||||
private final org.bson.Document documentPointer;
|
||||
private final Map<String, String> placeholderMap;
|
||||
private final boolean isSimpleTargetPointer;
|
||||
|
||||
static LinkageDocument from(String lookup) {
|
||||
return new LinkageDocument(lookup);
|
||||
}
|
||||
|
||||
private LinkageDocument(String lookup) {
|
||||
|
||||
this.lookup = lookup;
|
||||
this.placeholderMap = new LinkedHashMap<>();
|
||||
|
||||
int index = 0;
|
||||
Matcher matcher = EXPRESSION_PATTERN.matcher(lookup);
|
||||
String targetLookup = lookup;
|
||||
|
||||
while (matcher.find()) {
|
||||
|
||||
String expression = matcher.group();
|
||||
String fieldName = matcher.group("fieldName").replace("target.", "");
|
||||
|
||||
String placeholder = placeholder(index);
|
||||
placeholderMap.put(placeholder, fieldName);
|
||||
targetLookup = targetLookup.replace(expression, "'" + placeholder + "'");
|
||||
index++;
|
||||
}
|
||||
|
||||
this.documentPointer = org.bson.Document.parse(targetLookup);
|
||||
this.isSimpleTargetPointer = placeholderMap.size() == 1 && placeholderMap.containsValue("target")
|
||||
&& lookup.contains("#target");
|
||||
}
|
||||
|
||||
private String placeholder(int index) {
|
||||
return "###_" + index + "_###";
|
||||
}
|
||||
|
||||
private boolean isPlaceholder(String key) {
|
||||
return PLACEHOLDER_PATTERN.matcher(key).matches();
|
||||
}
|
||||
|
||||
DocumentPointer<Object> getDocumentPointer(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
|
||||
return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity,
|
||||
propertyAccessor);
|
||||
}
|
||||
|
||||
Object updatePlaceholders(org.bson.Document source, org.bson.Document target,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
MongoPersistentEntity<?> persistentEntity, PersistentPropertyAccessor<?> propertyAccessor) {
|
||||
|
||||
for (Entry<String, Object> entry : source.entrySet()) {
|
||||
|
||||
if (entry.getKey().startsWith("$")) {
|
||||
throw new InvalidDataAccessApiUsageException(String.format(
|
||||
"Cannot derive document pointer from lookup '%s' using query operator (%s). Please consider registering a custom converter.",
|
||||
lookup, entry.getKey()));
|
||||
}
|
||||
|
||||
if (entry.getValue() instanceof Document) {
|
||||
|
||||
MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey());
|
||||
if (persistentProperty != null && persistentProperty.isEntity()) {
|
||||
|
||||
MongoPersistentEntity<?> nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType());
|
||||
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
|
||||
nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty))));
|
||||
} else {
|
||||
target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext,
|
||||
persistentEntity, propertyAccessor));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (placeholderMap.containsKey(entry.getValue())) {
|
||||
|
||||
String attribute = placeholderMap.get(entry.getValue());
|
||||
if (attribute.contains(".")) {
|
||||
attribute = attribute.substring(attribute.lastIndexOf('.') + 1);
|
||||
}
|
||||
|
||||
String fieldName = entry.getKey().equals("_id") ? "id" : entry.getKey();
|
||||
if (!fieldName.contains(".")) {
|
||||
|
||||
Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName));
|
||||
target.put(attribute, targetValue);
|
||||
continue;
|
||||
}
|
||||
|
||||
PersistentPropertyPath<?> path = mappingContext
|
||||
.getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation()));
|
||||
Object targetValue = propertyAccessor.getProperty(path);
|
||||
target.put(attribute, targetValue);
|
||||
continue;
|
||||
}
|
||||
|
||||
target.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
if (target.size() == 1 && isSimpleTargetPointer) {
|
||||
return target.values().iterator().next();
|
||||
}
|
||||
|
||||
return target;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* The source object to resolve document references upon. Encapsulates the actual source and the reference specific
|
||||
* values.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class DocumentReferenceSource {
|
||||
|
||||
private final Object self;
|
||||
|
||||
private final @Nullable Object targetSource;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DocumentReferenceSource}.
|
||||
*
|
||||
* @param self the entire wrapper object holding references. Must not be {@literal null}.
|
||||
* @param targetSource the reference value source.
|
||||
*/
|
||||
DocumentReferenceSource(Object self, @Nullable Object targetSource) {
|
||||
|
||||
this.self = self;
|
||||
this.targetSource = targetSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the outer document.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public Object getSelf() {
|
||||
return self;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the actual (property specific) reference value.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Object getTargetSource() {
|
||||
return targetSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise.
|
||||
*
|
||||
* @param source
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
static Object getTargetSource(Object source) {
|
||||
return source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() : source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise.
|
||||
*
|
||||
* @param self
|
||||
* @return
|
||||
*/
|
||||
static Object getSelf(Object self) {
|
||||
return self instanceof DocumentReferenceSource ? ((DocumentReferenceSource) self).getSelf() : self;
|
||||
}
|
||||
}
|
||||
@@ -15,18 +15,18 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* Allows direct interaction with the underlying {@code LazyLoadingInterceptor}.
|
||||
* Allows direct interaction with the underlying {@link LazyLoadingInterceptor}.
|
||||
*
|
||||
* @author Thomas Darimont
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 1.5
|
||||
* @see LazyLoadingProxyFactory
|
||||
*/
|
||||
public interface LazyLoadingProxy {
|
||||
|
||||
@@ -46,15 +46,4 @@ public interface LazyLoadingProxy {
|
||||
*/
|
||||
@Nullable
|
||||
DBRef toDBRef();
|
||||
|
||||
/**
|
||||
* Returns the raw {@literal source} object that defines the reference.
|
||||
*
|
||||
* @return can be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
@Nullable
|
||||
default Object getSource() {
|
||||
return toDBRef();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,303 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import static org.springframework.util.ReflectionUtils.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
import org.aopalliance.intercept.MethodInterceptor;
|
||||
import org.aopalliance.intercept.MethodInvocation;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.cglib.proxy.Callback;
|
||||
import org.springframework.cglib.proxy.Enhancer;
|
||||
import org.springframework.cglib.proxy.Factory;
|
||||
import org.springframework.cglib.proxy.MethodProxy;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mongodb.ClientSessionException;
|
||||
import org.springframework.data.mongodb.LazyLoadingException;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.objenesis.ObjenesisStd;
|
||||
import org.springframework.util.ReflectionUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
|
||||
/**
|
||||
* {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
class LazyLoadingProxyFactory {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(LazyLoadingProxyFactory.class);
|
||||
|
||||
private final ObjenesisStd objenesis;
|
||||
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
|
||||
public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.objenesis = new ObjenesisStd(true);
|
||||
}
|
||||
|
||||
public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback,
|
||||
Object source) {
|
||||
|
||||
Class<?> propertyType = property.getType();
|
||||
LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator);
|
||||
|
||||
if (!propertyType.isInterface()) {
|
||||
|
||||
Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType));
|
||||
factory.setCallbacks(new Callback[] { interceptor });
|
||||
|
||||
return factory;
|
||||
}
|
||||
|
||||
ProxyFactory proxyFactory = new ProxyFactory();
|
||||
|
||||
for (Class<?> type : propertyType.getInterfaces()) {
|
||||
proxyFactory.addInterface(type);
|
||||
}
|
||||
|
||||
proxyFactory.addInterface(LazyLoadingProxy.class);
|
||||
proxyFactory.addInterface(propertyType);
|
||||
proxyFactory.addAdvice(interceptor);
|
||||
|
||||
return proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the CGLib enhanced type for the given source type.
|
||||
*
|
||||
* @param type
|
||||
* @return
|
||||
*/
|
||||
private Class<?> getEnhancedTypeFor(Class<?> type) {
|
||||
|
||||
Enhancer enhancer = new Enhancer();
|
||||
enhancer.setSuperclass(type);
|
||||
enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class);
|
||||
enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class });
|
||||
|
||||
return enhancer.createClass();
|
||||
}
|
||||
|
||||
public static class LazyLoadingInterceptor
|
||||
implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable {
|
||||
|
||||
private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD;
|
||||
|
||||
static {
|
||||
try {
|
||||
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget");
|
||||
TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef");
|
||||
FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize");
|
||||
GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource");
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private final MongoPersistentProperty property;
|
||||
private final DbRefResolverCallback callback;
|
||||
private final Object source;
|
||||
private final PersistenceExceptionTranslator exceptionTranslator;
|
||||
private volatile boolean resolved;
|
||||
private @Nullable Object result;
|
||||
|
||||
public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source,
|
||||
PersistenceExceptionTranslator exceptionTranslator) {
|
||||
|
||||
this.property = property;
|
||||
this.callback = callback;
|
||||
this.source = source;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object invoke(MethodInvocation invocation) throws Throwable {
|
||||
return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable {
|
||||
|
||||
if (INITIALIZE_METHOD.equals(method)) {
|
||||
return ensureResolved();
|
||||
}
|
||||
|
||||
if (TO_DBREF_METHOD.equals(method)) {
|
||||
return source instanceof DBRef ? source : null;
|
||||
}
|
||||
|
||||
if (GET_SOURCE_METHOD.equals(method)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) {
|
||||
|
||||
if (ReflectionUtils.isToStringMethod(method)) {
|
||||
return proxyToString(source);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isEqualsMethod(method)) {
|
||||
return proxyEquals(o, args[0]);
|
||||
}
|
||||
|
||||
if (ReflectionUtils.isHashCodeMethod(method)) {
|
||||
return proxyHashCode();
|
||||
}
|
||||
|
||||
// DATAMONGO-1076 - finalize methods should not trigger proxy initialization
|
||||
if (FINALIZE_METHOD.equals(method)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Object target = ensureResolved();
|
||||
|
||||
if (target == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
ReflectionUtils.makeAccessible(method);
|
||||
|
||||
return method.invoke(target, args);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private Object ensureResolved() {
|
||||
|
||||
if (!resolved) {
|
||||
this.result = resolve();
|
||||
this.resolved = true;
|
||||
}
|
||||
|
||||
return this.result;
|
||||
}
|
||||
|
||||
private String proxyToString(@Nullable Object source) {
|
||||
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (source != null) {
|
||||
if (source instanceof DBRef) {
|
||||
description.append(((DBRef) source).getCollectionName());
|
||||
description.append(":");
|
||||
description.append(((DBRef) source).getId());
|
||||
} else {
|
||||
description.append(source);
|
||||
}
|
||||
} else {
|
||||
description.append(System.identityHashCode(source));
|
||||
}
|
||||
description.append("$").append(LazyLoadingProxy.class.getSimpleName());
|
||||
|
||||
return description.toString();
|
||||
}
|
||||
|
||||
private boolean proxyEquals(@Nullable Object proxy, Object that) {
|
||||
|
||||
if (!(that instanceof LazyLoadingProxy)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (that == proxy) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return proxyToString(proxy).equals(that.toString());
|
||||
}
|
||||
|
||||
private int proxyHashCode() {
|
||||
return proxyToString(source).hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for serialization.
|
||||
*
|
||||
* @param out
|
||||
* @throws IOException
|
||||
*/
|
||||
private void writeObject(ObjectOutputStream out) throws IOException {
|
||||
|
||||
ensureResolved();
|
||||
out.writeObject(this.result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback method for deserialization.
|
||||
*
|
||||
* @param in
|
||||
* @throws IOException
|
||||
*/
|
||||
private void readObject(ObjectInputStream in) throws IOException {
|
||||
|
||||
try {
|
||||
this.resolved = true;
|
||||
this.result = in.readObject();
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new LazyLoadingException("Could not deserialize result", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private synchronized Object resolve() {
|
||||
|
||||
if (resolved) {
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace(String.format("Accessing already resolved lazy loading property %s.%s",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace(String.format("Resolving lazy loading property %s.%s",
|
||||
property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()));
|
||||
}
|
||||
|
||||
return callback.resolve(property);
|
||||
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex);
|
||||
|
||||
if (translatedException instanceof ClientSessionException) {
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex);
|
||||
}
|
||||
|
||||
throw new LazyLoadingException("Unable to lazily resolve DBRef!",
|
||||
translatedException != null ? translatedException : ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -27,36 +27,25 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
import org.bson.codecs.Codec;
|
||||
import org.bson.codecs.DecoderContext;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.json.JsonReader;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.BeansException;
|
||||
import org.springframework.beans.factory.BeanClassLoaderAware;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.core.CollectionFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.support.DefaultConversionService;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mapping.AccessOptions;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.PersistentPropertyPath;
|
||||
import org.springframework.data.mapping.PersistentPropertyPathAccessor;
|
||||
import org.springframework.data.mapping.PreferredConstructor;
|
||||
import org.springframework.data.mapping.PreferredConstructor.Parameter;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
@@ -72,11 +61,8 @@ import org.springframework.data.mapping.model.SpELExpressionEvaluator;
|
||||
import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback;
|
||||
@@ -84,11 +70,7 @@ import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent;
|
||||
import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.data.util.Predicates;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -115,7 +97,6 @@ import com.mongodb.DBRef;
|
||||
* @author Mark Paluch
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Heesu Jung
|
||||
* @author Divya Srivastava
|
||||
*/
|
||||
public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
|
||||
|
||||
@@ -124,24 +105,20 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
public static final ClassTypeInformation<Bson> BSON = ClassTypeInformation.from(Bson.class);
|
||||
|
||||
protected static final Log LOGGER = LogFactory.getLog(MappingMongoConverter.class);
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
|
||||
|
||||
protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
protected final QueryMapper idMapper;
|
||||
protected final DbRefResolver dbRefResolver;
|
||||
protected final DefaultDbRefProxyHandler dbRefProxyHandler;
|
||||
protected final ReferenceLookupDelegate referenceLookupDelegate;
|
||||
|
||||
protected @Nullable ApplicationContext applicationContext;
|
||||
protected MongoTypeMapper typeMapper;
|
||||
protected @Nullable String mapKeyDotReplacement = null;
|
||||
protected @Nullable CodecRegistryProvider codecRegistryProvider;
|
||||
|
||||
private MongoTypeMapper defaultTypeMapper;
|
||||
private SpELContext spELContext;
|
||||
private @Nullable EntityCallbacks entityCallbacks;
|
||||
private final DocumentPointerFactory documentPointerFactory;
|
||||
private final SpelAwareProxyProjectionFactory projectionFactory = new SpelAwareProxyProjectionFactory();
|
||||
|
||||
/**
|
||||
* Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}.
|
||||
@@ -158,9 +135,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null!");
|
||||
|
||||
this.dbRefResolver = dbRefResolver;
|
||||
|
||||
this.mappingContext = mappingContext;
|
||||
this.defaultTypeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext,
|
||||
this::getWriteTarget);
|
||||
this.idMapper = new QueryMapper(this);
|
||||
|
||||
@@ -171,9 +147,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
ConversionContext context = getConversionContext(path);
|
||||
return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator);
|
||||
});
|
||||
|
||||
this.referenceLookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext);
|
||||
this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -213,7 +186,9 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
* @param typeMapper the typeMapper to set. Can be {@literal null}.
|
||||
*/
|
||||
public void setTypeMapper(@Nullable MongoTypeMapper typeMapper) {
|
||||
this.typeMapper = typeMapper;
|
||||
this.typeMapper = typeMapper == null
|
||||
? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext)
|
||||
: typeMapper;
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -222,17 +197,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
@Override
|
||||
public MongoTypeMapper getTypeMapper() {
|
||||
return this.typeMapper == null ? this.defaultTypeMapper : this.typeMapper;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ProjectionFactory getProjectionFactory() {
|
||||
return projectionFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CustomConversions getCustomConversions() {
|
||||
return conversions;
|
||||
return this.typeMapper;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -277,23 +242,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
this.applicationContext = applicationContext;
|
||||
this.spELContext = new SpELContext(this.spELContext, applicationContext);
|
||||
this.projectionFactory.setBeanFactory(applicationContext);
|
||||
this.projectionFactory.setBeanClassLoader(applicationContext.getClassLoader());
|
||||
|
||||
if (entityCallbacks == null) {
|
||||
setEntityCallbacks(EntityCallbacks.create(applicationContext));
|
||||
}
|
||||
|
||||
ClassLoader classLoader = applicationContext.getClassLoader();
|
||||
if (this.defaultTypeMapper instanceof BeanClassLoaderAware && classLoader != null) {
|
||||
((BeanClassLoaderAware) this.defaultTypeMapper).setBeanClassLoader(classLoader);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link AfterConvertCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
@@ -306,155 +264,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <R> R project(EntityProjection<R, ?> projection, Bson bson) {
|
||||
|
||||
if (!projection.isProjection()) { // backed by real object
|
||||
|
||||
TypeInformation<?> typeToRead = projection.getMappedType().getType().isInterface() ? projection.getDomainType()
|
||||
: projection.getMappedType();
|
||||
return (R) read(typeToRead, bson);
|
||||
}
|
||||
|
||||
ProjectingConversionContext context = new ProjectingConversionContext(conversions, ObjectPath.ROOT,
|
||||
this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead,
|
||||
projection);
|
||||
|
||||
return doReadProjection(context, bson, projection);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <R> R doReadProjection(ConversionContext context, Bson bson,
|
||||
EntityProjection<R, ?> projection) {
|
||||
|
||||
MongoPersistentEntity<?> entity = getMappingContext().getRequiredPersistentEntity(projection.getActualDomainType());
|
||||
TypeInformation<?> mappedType = projection.getActualMappedType();
|
||||
MongoPersistentEntity<R> mappedEntity = (MongoPersistentEntity<R>) getMappingContext()
|
||||
.getPersistentEntity(mappedType);
|
||||
SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext);
|
||||
|
||||
boolean isInterfaceProjection = mappedType.getType().isInterface();
|
||||
if (isInterfaceProjection) {
|
||||
|
||||
PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(mappedEntity);
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson);
|
||||
PersistentPropertyAccessor<?> accessor = new MapPersistentPropertyAccessor();
|
||||
|
||||
PersistentPropertyAccessor<?> convertingAccessor = PropertyTranslatingPropertyAccessor
|
||||
.create(new ConvertingPropertyAccessor<>(accessor, conversionService), propertyTranslator);
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor,
|
||||
evaluator);
|
||||
|
||||
readProperties(context, entity, convertingAccessor, documentAccessor, valueProvider, evaluator,
|
||||
Predicates.isTrue());
|
||||
return (R) projectionFactory.createProjection(mappedType.getType(), accessor.getBean());
|
||||
}
|
||||
|
||||
// DTO projection
|
||||
if (mappedEntity == null) {
|
||||
throw new MappingException(String.format("No mapping metadata found for %s", mappedType.getType().getName()));
|
||||
}
|
||||
|
||||
// create target instance, merge metadata from underlying DTO type
|
||||
PersistentPropertyTranslator propertyTranslator = PersistentPropertyTranslator.create(entity,
|
||||
Predicates.negate(MongoPersistentProperty::hasExplicitFieldName));
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson) {
|
||||
@Override
|
||||
String getFieldName(MongoPersistentProperty prop) {
|
||||
return propertyTranslator.translate(prop).getFieldName();
|
||||
}
|
||||
};
|
||||
|
||||
PreferredConstructor<?, MongoPersistentProperty> persistenceConstructor = mappedEntity.getPersistenceConstructor();
|
||||
ParameterValueProvider<MongoPersistentProperty> provider = persistenceConstructor != null
|
||||
&& persistenceConstructor.hasParameters()
|
||||
? getParameterProvider(context, mappedEntity, documentAccessor, evaluator)
|
||||
: NoOpParameterValueProvider.INSTANCE;
|
||||
|
||||
EntityInstantiator instantiator = instantiators.getInstantiatorFor(mappedEntity);
|
||||
R instance = instantiator.createInstance(mappedEntity, provider);
|
||||
PersistentPropertyAccessor<R> accessor = mappedEntity.getPropertyAccessor(instance);
|
||||
|
||||
populateProperties(context, mappedEntity, documentAccessor, evaluator, instance);
|
||||
|
||||
PersistentPropertyAccessor<?> convertingAccessor = new ConvertingPropertyAccessor<>(accessor, conversionService);
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(context, documentAccessor, evaluator);
|
||||
|
||||
readProperties(context, mappedEntity, convertingAccessor, documentAccessor, valueProvider, evaluator,
|
||||
Predicates.isTrue());
|
||||
|
||||
return accessor.getBean();
|
||||
}
|
||||
|
||||
private Object doReadOrProject(ConversionContext context, Bson source, TypeInformation<?> typeHint,
|
||||
EntityProjection<?, ?> typeDescriptor) {
|
||||
|
||||
if (typeDescriptor.isProjection()) {
|
||||
return doReadProjection(context, BsonUtils.asDocument(source), typeDescriptor);
|
||||
}
|
||||
|
||||
return readDocument(context, source, typeHint);
|
||||
}
|
||||
|
||||
class ProjectingConversionContext extends ConversionContext {
|
||||
|
||||
private final EntityProjection<?, ?> returnedTypeDescriptor;
|
||||
|
||||
ProjectingConversionContext(CustomConversions customConversions, ObjectPath path,
|
||||
ContainerValueConverter<Collection<?>> collectionConverter, ContainerValueConverter<Bson> mapConverter,
|
||||
ContainerValueConverter<DBRef> dbRefConverter, ValueConverter<Object> elementConverter,
|
||||
EntityProjection<?, ?> projection) {
|
||||
super(customConversions, path,
|
||||
(context, source, typeHint) -> doReadOrProject(context, source, typeHint, projection),
|
||||
|
||||
collectionConverter, mapConverter, dbRefConverter, elementConverter);
|
||||
this.returnedTypeDescriptor = projection;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConversionContext forProperty(String name) {
|
||||
|
||||
EntityProjection<?, ?> property = returnedTypeDescriptor.findProperty(name);
|
||||
if (property == null) {
|
||||
return super.forProperty(name);
|
||||
}
|
||||
|
||||
return new ProjectingConversionContext(conversions, path, collectionConverter, mapConverter, dbRefConverter,
|
||||
elementConverter, property);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConversionContext withPath(ObjectPath currentPath) {
|
||||
return new ProjectingConversionContext(conversions, currentPath, collectionConverter, mapConverter,
|
||||
dbRefConverter, elementConverter, returnedTypeDescriptor);
|
||||
}
|
||||
}
|
||||
|
||||
static class MapPersistentPropertyAccessor implements PersistentPropertyAccessor<Map<String, Object>> {
|
||||
|
||||
Map<String, Object> map = new LinkedHashMap<>();
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentProperty<?> persistentProperty, Object o) {
|
||||
map.put(persistentProperty.getName(), o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentProperty<?> persistentProperty) {
|
||||
return map.get(persistentProperty.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> getBean() {
|
||||
return map;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.Document)
|
||||
*/
|
||||
public <S extends Object> S read(Class<S> clazz, Bson bson) {
|
||||
public <S extends Object> S read(Class<S> clazz, final Bson bson) {
|
||||
return read(ClassTypeInformation.from(clazz), bson);
|
||||
}
|
||||
|
||||
@@ -476,7 +290,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
TypeInformation<? extends S> typeHint) {
|
||||
|
||||
Document document = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson;
|
||||
TypeInformation<? extends S> typeToRead = getTypeMapper().readType(document, typeHint);
|
||||
TypeInformation<? extends S> typeToRead = typeMapper.readType(document, typeHint);
|
||||
Class<? extends S> rawType = typeToRead.getType();
|
||||
|
||||
if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) {
|
||||
@@ -538,18 +352,11 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
parameterProvider);
|
||||
}
|
||||
|
||||
private <S> S read(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
private <S extends Object> S read(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
|
||||
SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext);
|
||||
DocumentAccessor documentAccessor = new DocumentAccessor(bson);
|
||||
|
||||
if (hasIdentifier(bson)) {
|
||||
S existing = findContextualEntity(context, entity, bson);
|
||||
if (existing != null) {
|
||||
return existing;
|
||||
}
|
||||
}
|
||||
|
||||
PreferredConstructor<S, MongoPersistentProperty> persistenceConstructor = entity.getPersistenceConstructor();
|
||||
|
||||
ParameterValueProvider<MongoPersistentProperty> provider = persistenceConstructor != null
|
||||
@@ -560,23 +367,12 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
S instance = instantiator.createInstance(entity, provider);
|
||||
|
||||
if (entity.requiresPropertyPopulation()) {
|
||||
|
||||
return populateProperties(context, entity, documentAccessor, evaluator, instance);
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
|
||||
private boolean hasIdentifier(Document bson) {
|
||||
return bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME) != null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private <S> S findContextualEntity(ConversionContext context, MongoPersistentEntity<S> entity, Document bson) {
|
||||
return context.getPath().getPathItem(bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME), entity.getCollection(),
|
||||
entity.getType());
|
||||
}
|
||||
|
||||
private <S> S populateProperties(ConversionContext context, MongoPersistentEntity<S> entity,
|
||||
DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) {
|
||||
|
||||
@@ -592,8 +388,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor,
|
||||
evaluator);
|
||||
|
||||
Predicate<MongoPersistentProperty> propertyFilter = isIdentifier(entity).or(isConstructorArgument(entity)).negate();
|
||||
readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator, propertyFilter);
|
||||
readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator);
|
||||
|
||||
return accessor.getBean();
|
||||
}
|
||||
@@ -635,56 +430,50 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
private void readProperties(ConversionContext context, MongoPersistentEntity<?> entity,
|
||||
PersistentPropertyAccessor<?> accessor, DocumentAccessor documentAccessor,
|
||||
MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator,
|
||||
Predicate<MongoPersistentProperty> propertyFilter) {
|
||||
MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator) {
|
||||
|
||||
DbRefResolverCallback callback = null;
|
||||
|
||||
for (MongoPersistentProperty prop : entity) {
|
||||
|
||||
if (!propertyFilter.test(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
ConversionContext propertyContext = context.forProperty(prop.getName());
|
||||
MongoDbPropertyValueProvider valueProviderToUse = valueProvider.withContext(propertyContext);
|
||||
|
||||
if (prop.isAssociation() && !entity.isConstructorArgument(prop)) {
|
||||
|
||||
if (callback == null) {
|
||||
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
|
||||
callback = getDbRefResolverCallback(context, documentAccessor, evaluator);
|
||||
}
|
||||
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
|
||||
propertyContext,
|
||||
evaluator);
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prop.isUnwrapped()) {
|
||||
|
||||
accessor.setProperty(prop,
|
||||
readUnwrapped(propertyContext, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop)));
|
||||
readUnwrapped(context, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop)));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!documentAccessor.hasValue(prop)) {
|
||||
// We skip the id property since it was already set
|
||||
|
||||
if (entity.isIdProperty(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (entity.isConstructorArgument(prop) || !documentAccessor.hasValue(prop)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prop.isAssociation()) {
|
||||
|
||||
if (callback == null) {
|
||||
callback = getDbRefResolverCallback(propertyContext, documentAccessor, evaluator);
|
||||
callback = getDbRefResolverCallback(context, documentAccessor, evaluator);
|
||||
}
|
||||
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback,
|
||||
propertyContext,
|
||||
evaluator);
|
||||
readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback);
|
||||
continue;
|
||||
}
|
||||
|
||||
accessor.setProperty(prop, valueProviderToUse.getPropertyValue(prop));
|
||||
accessor.setProperty(prop, valueProvider.getPropertyValue(prop));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -696,43 +485,16 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
private void readAssociation(Association<MongoPersistentProperty> association, PersistentPropertyAccessor<?> accessor,
|
||||
DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback,
|
||||
ConversionContext context, SpELExpressionEvaluator evaluator) {
|
||||
DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback) {
|
||||
|
||||
MongoPersistentProperty property = association.getInverse();
|
||||
Object value = documentAccessor.get(property);
|
||||
|
||||
if (property.isDocumentReference()
|
||||
|| (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) {
|
||||
|
||||
// quite unusual but sounds like worth having?
|
||||
|
||||
if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) {
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
DocumentPointer<?> pointer = () -> value;
|
||||
|
||||
// collection like special treatment
|
||||
accessor.setProperty(property, conversionService.convert(pointer, property.getActualType()));
|
||||
} else {
|
||||
|
||||
accessor.setProperty(property,
|
||||
dbRefResolver.resolveReference(property,
|
||||
new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)),
|
||||
referenceLookupDelegate, context::convert));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
DBRef dbref = value instanceof DBRef ? (DBRef) value : null;
|
||||
|
||||
accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler));
|
||||
}
|
||||
|
||||
@@ -773,49 +535,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return createDBRef(object, referringProperty);
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
|
||||
if (source instanceof LazyLoadingProxy) {
|
||||
return () -> ((LazyLoadingProxy) source).getSource();
|
||||
}
|
||||
|
||||
Assert.notNull(referringProperty, "Cannot create DocumentReference. The referringProperty must not be null!");
|
||||
|
||||
if (referringProperty.isDbReference()) {
|
||||
return () -> toDBRef(source, referringProperty);
|
||||
}
|
||||
|
||||
if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) {
|
||||
return createDocumentPointer(source, referringProperty);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference");
|
||||
}
|
||||
|
||||
DocumentPointer<?> createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
|
||||
if (referringProperty == null) {
|
||||
return () -> source;
|
||||
}
|
||||
|
||||
if (source instanceof DocumentPointer) {
|
||||
return (DocumentPointer<?>) source;
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignableValue(referringProperty.getType(), source)
|
||||
&& conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) {
|
||||
return conversionService.convert(source, DocumentPointer.class);
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) {
|
||||
return documentPointerFactory.computePointer(mappingContext, referringProperty, source,
|
||||
referringProperty.getActualType());
|
||||
}
|
||||
|
||||
return () -> source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Root entry method into write conversion. Adds a type discriminator to the {@link Document}. Shouldn't be called for
|
||||
* nested conversions.
|
||||
@@ -837,7 +556,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
BsonUtils.removeNullId(bson);
|
||||
|
||||
if (requiresTypeHint(entityType)) {
|
||||
getTypeMapper().writeType(type, bson);
|
||||
typeMapper.writeType(type, bson);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -923,7 +642,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
continue;
|
||||
}
|
||||
if (prop.isAssociation()) {
|
||||
|
||||
writeAssociation(prop.getRequiredAssociation(), accessor, dbObjectAccessor);
|
||||
continue;
|
||||
}
|
||||
@@ -931,10 +649,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Object value = accessor.getProperty(prop);
|
||||
|
||||
if (value == null) {
|
||||
if (prop.writeNullValues()) {
|
||||
dbObjectAccessor.put(prop, null);
|
||||
}
|
||||
} else if (!conversions.isSimpleType(value.getClass())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!conversions.isSimpleType(value.getClass())) {
|
||||
writePropertyInternal(value, dbObjectAccessor, prop);
|
||||
} else {
|
||||
writeSimpleInternal(value, bson, prop);
|
||||
@@ -947,14 +665,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
MongoPersistentProperty inverseProp = association.getInverse();
|
||||
|
||||
Object value = accessor.getProperty(inverseProp);
|
||||
|
||||
if (value == null && !inverseProp.isUnwrapped() && inverseProp.writeNullValues()) {
|
||||
dbObjectAccessor.put(inverseProp, null);
|
||||
return;
|
||||
}
|
||||
|
||||
writePropertyInternal(value, dbObjectAccessor, inverseProp);
|
||||
writePropertyInternal(accessor.getProperty(inverseProp), dbObjectAccessor, inverseProp);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked" })
|
||||
@@ -1008,13 +719,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return;
|
||||
}
|
||||
|
||||
if (prop.isAssociation() && prop.isAnnotationPresent(Reference.class)) {
|
||||
|
||||
accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext)
|
||||
.computePointer(mappingContext, prop, obj, valueType.getType()).getPointer());
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* If we have a LazyLoadingProxy we make sure it is initialized first.
|
||||
*/
|
||||
@@ -1053,22 +757,10 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (!property.isDbReference()) {
|
||||
|
||||
if (property.isAssociation()) {
|
||||
|
||||
List<Object> targetCollection = collection.stream().map(it -> {
|
||||
return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType())
|
||||
.getPointer();
|
||||
}).collect(Collectors.toList());
|
||||
|
||||
return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class),
|
||||
new ArrayList<>());
|
||||
}
|
||||
|
||||
if (property.hasExplicitWriteTarget()) {
|
||||
return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>());
|
||||
}
|
||||
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>());
|
||||
return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
|
||||
}
|
||||
|
||||
List<Object> dbList = new ArrayList<>(collection.size());
|
||||
@@ -1097,7 +789,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(map, "Given map must not be null!");
|
||||
Assert.notNull(property, "PersistentProperty must not be null!");
|
||||
|
||||
if (!property.isAssociation()) {
|
||||
if (!property.isDbReference()) {
|
||||
return writeMapInternal(map, new Document(), property.getTypeInformation());
|
||||
}
|
||||
|
||||
@@ -1111,12 +803,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
if (conversions.isSimpleType(key.getClass())) {
|
||||
|
||||
String simpleKey = prepareMapKey(key.toString());
|
||||
if (property.isDbReference()) {
|
||||
document.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
} else {
|
||||
document.put(simpleKey, documentPointerFactory
|
||||
.computePointer(mappingContext, property, value, property.getActualType()).getPointer());
|
||||
}
|
||||
document.put(simpleKey, value != null ? createDBRef(value, property) : null);
|
||||
|
||||
} else {
|
||||
throw new MappingException("Cannot use a complex object as a key value.");
|
||||
@@ -1153,7 +840,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
collection.add(getPotentiallyConvertedSimpleWrite(element,
|
||||
componentType != null ? componentType.getType() : Object.class));
|
||||
} else if (element instanceof Collection || elementType.isArray()) {
|
||||
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new ArrayList<>()));
|
||||
collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList()));
|
||||
} else {
|
||||
Document document = new Document();
|
||||
writeInternal(element, document, componentType);
|
||||
@@ -1185,7 +872,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
writeSimpleInternal(val, bson, simpleKey);
|
||||
} else if (val instanceof Collection || val.getClass().isArray()) {
|
||||
BsonUtils.addToMap(bson, simpleKey,
|
||||
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>()));
|
||||
writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList()));
|
||||
} else {
|
||||
Document document = new Document();
|
||||
TypeInformation<?> valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType()
|
||||
@@ -1279,7 +966,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
boolean notTheSameClass = !valueType.equals(reference);
|
||||
if (notTheSameClass) {
|
||||
getTypeMapper().writeType(valueType, bson);
|
||||
typeMapper.writeType(valueType, bson);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1487,7 +1174,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
Assert.notNull(bson, "Document must not be null!");
|
||||
Assert.notNull(targetType, "TypeInformation must not be null!");
|
||||
|
||||
Class<?> mapType = getTypeMapper().readType(bson, targetType).getType();
|
||||
Class<?> mapType = typeMapper.readType(bson, targetType).getType();
|
||||
|
||||
TypeInformation<?> keyType = targetType.getComponentType();
|
||||
TypeInformation<?> valueType = targetType.getMapValueType() == null ? ClassTypeInformation.OBJECT
|
||||
@@ -1506,7 +1193,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
sourceMap.forEach((k, v) -> {
|
||||
|
||||
if (getTypeMapper().isTypeKey(k)) {
|
||||
if (typeMapper.isTypeKey(k)) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1669,7 +1356,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
}
|
||||
|
||||
if (getTypeMapper().isTypeKey(key)) {
|
||||
if (typeMapper.isTypeKey(key)) {
|
||||
|
||||
keyToRemove = key;
|
||||
|
||||
@@ -1840,7 +1527,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
target.conversions = conversions;
|
||||
target.spELContext = spELContext;
|
||||
target.setInstantiators(instantiators);
|
||||
target.defaultTypeMapper = defaultTypeMapper;
|
||||
target.typeMapper = typeMapper;
|
||||
target.setCodecRegistryProvider(dbFactory);
|
||||
target.afterPropertiesSet();
|
||||
@@ -1890,14 +1576,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return true;
|
||||
}
|
||||
|
||||
static Predicate<MongoPersistentProperty> isIdentifier(PersistentEntity<?, ?> entity) {
|
||||
return entity::isIdProperty;
|
||||
}
|
||||
|
||||
static Predicate<MongoPersistentProperty> isConstructorArgument(PersistentEntity<?, ?> entity) {
|
||||
return entity::isConstructorArgument;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field
|
||||
* of the configured source {@link Document}.
|
||||
@@ -1961,15 +1639,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
return (T) context.convert(value, property.getTypeInformation());
|
||||
}
|
||||
|
||||
public MongoDbPropertyValueProvider withContext(ConversionContext context) {
|
||||
if (context == this.context) {
|
||||
return this;
|
||||
}
|
||||
|
||||
return new MongoDbPropertyValueProvider(context, accessor, evaluator);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2017,11 +1686,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler);
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return (T) dbRefResolver.resolveReference(property, accessor.get(property), referenceLookupDelegate,
|
||||
context::convert);
|
||||
}
|
||||
|
||||
return super.getPropertyValue(property);
|
||||
}
|
||||
}
|
||||
@@ -2188,13 +1852,13 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
*/
|
||||
protected static class ConversionContext {
|
||||
|
||||
final org.springframework.data.convert.CustomConversions conversions;
|
||||
final ObjectPath path;
|
||||
final ContainerValueConverter<Bson> documentConverter;
|
||||
final ContainerValueConverter<Collection<?>> collectionConverter;
|
||||
final ContainerValueConverter<Bson> mapConverter;
|
||||
final ContainerValueConverter<DBRef> dbRefConverter;
|
||||
final ValueConverter<Object> elementConverter;
|
||||
private final org.springframework.data.convert.CustomConversions conversions;
|
||||
private final ObjectPath path;
|
||||
private final ContainerValueConverter<Bson> documentConverter;
|
||||
private final ContainerValueConverter<Collection<?>> collectionConverter;
|
||||
private final ContainerValueConverter<Bson> mapConverter;
|
||||
private final ContainerValueConverter<DBRef> dbRefConverter;
|
||||
private final ValueConverter<Object> elementConverter;
|
||||
|
||||
ConversionContext(org.springframework.data.convert.CustomConversions customConversions, ObjectPath path,
|
||||
ContainerValueConverter<Bson> documentConverter, ContainerValueConverter<Collection<?>> collectionConverter,
|
||||
@@ -2244,7 +1908,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
|
||||
if (typeHint.isMap()) {
|
||||
|
||||
if (ClassUtils.isAssignable(Document.class, typeHint.getType())) {
|
||||
if(ClassUtils.isAssignable(Document.class, typeHint.getType())) {
|
||||
return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint);
|
||||
}
|
||||
|
||||
@@ -2252,8 +1916,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("Expected map like structure but found %s", source.getClass()));
|
||||
throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass()));
|
||||
}
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
@@ -2290,10 +1953,6 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
return path;
|
||||
}
|
||||
|
||||
public ConversionContext forProperty(String name) {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a simple {@code source} value into {@link TypeInformation the target type}.
|
||||
*
|
||||
@@ -2318,58 +1977,4 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class PropertyTranslatingPropertyAccessor<T> implements PersistentPropertyPathAccessor<T> {
|
||||
|
||||
private final PersistentPropertyAccessor<T> delegate;
|
||||
private final PersistentPropertyTranslator propertyTranslator;
|
||||
|
||||
private PropertyTranslatingPropertyAccessor(PersistentPropertyAccessor<T> delegate,
|
||||
PersistentPropertyTranslator propertyTranslator) {
|
||||
this.delegate = delegate;
|
||||
this.propertyTranslator = propertyTranslator;
|
||||
}
|
||||
|
||||
static <T> PersistentPropertyAccessor<T> create(PersistentPropertyAccessor<T> delegate,
|
||||
PersistentPropertyTranslator propertyTranslator) {
|
||||
return new PropertyTranslatingPropertyAccessor<>(delegate, propertyTranslator);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentProperty property, @Nullable Object value) {
|
||||
delegate.setProperty(translate(property), value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentProperty<?> property) {
|
||||
return delegate.getProperty(translate(property));
|
||||
}
|
||||
|
||||
@Override
|
||||
public T getBean() {
|
||||
return delegate.getBean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value,
|
||||
AccessOptions.SetOptions options) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path,
|
||||
AccessOptions.GetOptions context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProperty(PersistentPropertyPath<? extends PersistentProperty<?>> path, Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
private MongoPersistentProperty translate(PersistentProperty<?> property) {
|
||||
return propertyTranslator.translate((MongoPersistentProperty) property);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -19,17 +19,13 @@ import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.springframework.core.convert.ConversionException;
|
||||
import org.springframework.data.convert.CustomConversions;
|
||||
import org.springframework.data.convert.EntityConverter;
|
||||
import org.springframework.data.convert.EntityReader;
|
||||
import org.springframework.data.convert.TypeMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.EntityProjection;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -58,35 +54,6 @@ public interface MongoConverter
|
||||
*/
|
||||
MongoTypeMapper getTypeMapper();
|
||||
|
||||
/**
|
||||
* Returns the {@link ProjectionFactory} for this converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
ProjectionFactory getProjectionFactory();
|
||||
|
||||
/**
|
||||
* Returns the {@link CustomConversions} for this converter.
|
||||
*
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
CustomConversions getCustomConversions();
|
||||
|
||||
/**
|
||||
* Apply a projection to {@link Bson} and return the projection return type {@code R}.
|
||||
* {@link EntityProjection#isProjection() Non-projecting} descriptors fall back to {@link #read(Class, Object) regular
|
||||
* object materialization}.
|
||||
*
|
||||
* @param descriptor the projection descriptor, must not be {@literal null}.
|
||||
* @param bson must not be {@literal null}.
|
||||
* @param <R>
|
||||
* @return a new instance of the projection return type {@code R}.
|
||||
* @since 3.4
|
||||
*/
|
||||
<R> R project(EntityProjection<R, ?> descriptor, Bson bson);
|
||||
|
||||
/**
|
||||
* Mapping function capable of converting values into a desired target type by eg. extracting the actual java type
|
||||
* from a given {@link BsonValue}.
|
||||
@@ -187,5 +154,4 @@ public interface MongoConverter
|
||||
return convertToMongoType(id,(TypeInformation<?>) null);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.MongoDatabaseUtils;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents}
|
||||
* for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader {
|
||||
|
||||
private static final Log LOGGER = LogFactory.getLog(MongoDatabaseFactoryReferenceLoader.class);
|
||||
|
||||
private final MongoDatabaseFactory mongoDbFactory;
|
||||
|
||||
/**
|
||||
* @param mongoDbFactory must not be {@literal null}.
|
||||
*/
|
||||
public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) {
|
||||
|
||||
Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!");
|
||||
|
||||
this.mongoDbFactory = mongoDbFactory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
MongoCollection<Document> collection = getCollection(context);
|
||||
|
||||
if (LOGGER.isTraceEnabled()) {
|
||||
LOGGER.trace(String.format("Bulk fetching %s from %s.%s.", referenceQuery,
|
||||
StringUtils.hasText(context.getDatabase()) ? context.getDatabase()
|
||||
: collection.getNamespace().getDatabaseName(),
|
||||
context.getCollection()));
|
||||
}
|
||||
|
||||
return referenceQuery.apply(collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying
|
||||
* {@link MongoDatabaseFactory}.
|
||||
*
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}.
|
||||
*/
|
||||
protected MongoCollection<Document> getCollection(ReferenceCollection context) {
|
||||
|
||||
return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(),
|
||||
Document.class);
|
||||
}
|
||||
}
|
||||
@@ -17,8 +17,6 @@ package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.convert.EntityWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentPointer;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.util.TypeInformation;
|
||||
@@ -63,7 +61,6 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity<?> entity) {
|
||||
return convertToMongoType(obj, entity.getTypeInformation());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link DBRef} to refer to the given object.
|
||||
*
|
||||
@@ -73,17 +70,4 @@ public interface MongoWriter<T> extends EntityWriter<T, Bson> {
|
||||
* @return will never be {@literal null}.
|
||||
*/
|
||||
DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty);
|
||||
|
||||
/**
|
||||
* Creates a the {@link DocumentPointer} representing the link to another entity.
|
||||
*
|
||||
* @param source the object to create a document link to.
|
||||
* @param referringProperty the client-side property referring to the object which might carry additional metadata for
|
||||
* the {@link DBRef} object to create. Can be {@literal null}.
|
||||
* @return will never be {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
default DocumentPointer<?> toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) {
|
||||
return () -> toDBRef(source, referringProperty);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core.convert;
|
||||
import java.util.List;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
@@ -70,11 +69,4 @@ public enum NoOpDbRefResolver implements DbRefResolver {
|
||||
private <T> T handle() throws UnsupportedOperationException {
|
||||
throw new UnsupportedOperationException("DBRef resolution is not supported!");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Object resolveReference(MongoPersistentProperty property, Object source,
|
||||
ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright 2011-2022 the original author or authors.
|
||||
* Copyright 2011-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
@@ -21,16 +21,14 @@ import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.bson.types.ObjectId;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.core.convert.ConversionService;
|
||||
import org.springframework.core.convert.converter.Converter;
|
||||
import org.springframework.data.annotation.Reference;
|
||||
import org.springframework.data.domain.Example;
|
||||
import org.springframework.data.mapping.Association;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
@@ -75,7 +73,7 @@ import com.mongodb.DBRef;
|
||||
*/
|
||||
public class QueryMapper {
|
||||
|
||||
protected static final Log LOGGER = LogFactory.getLog(QueryMapper.class);
|
||||
protected static final Logger LOGGER = LoggerFactory.getLogger(QueryMapper.class);
|
||||
|
||||
private static final List<String> DEFAULT_ID_NAMES = Arrays.asList("id", "_id");
|
||||
private static final Document META_TEXT_SCORE = new Document("$meta", "textScore");
|
||||
@@ -223,8 +221,8 @@ public class QueryMapper {
|
||||
|
||||
if (fields.isEmpty()) {
|
||||
return BsonUtils.EMPTY_DOCUMENT;
|
||||
}
|
||||
|
||||
}
|
||||
Document target = new Document();
|
||||
|
||||
BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> {
|
||||
@@ -240,18 +238,6 @@ public class QueryMapper {
|
||||
return target;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds missing {@code $meta} representation if required.
|
||||
*
|
||||
* @param source must not be {@literal null}.
|
||||
* @param entity can be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.4
|
||||
*/
|
||||
public Document addMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity) {
|
||||
return mapMetaAttributes(source, entity, MetaMapping.FORCE);
|
||||
}
|
||||
|
||||
private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity<?> entity,
|
||||
MetaMapping metaMapping) {
|
||||
|
||||
@@ -530,10 +516,6 @@ public class QueryMapper {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
MongoPersistentEntity<?> entity = documentField.getPropertyEntity();
|
||||
return entity.hasIdProperty()
|
||||
&& (type.equals(DBRef.class) || entity.getRequiredIdProperty().getActualType().isAssignableFrom(type));
|
||||
@@ -647,7 +629,7 @@ public class QueryMapper {
|
||||
if (source instanceof Iterable) {
|
||||
BasicDBList result = new BasicDBList();
|
||||
for (Object element : (Iterable<?>) source) {
|
||||
result.add(createReferenceFor(element, property));
|
||||
result.add(createDbRefFor(element, property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@@ -656,12 +638,12 @@ public class QueryMapper {
|
||||
Document result = new Document();
|
||||
Document dbObject = (Document) source;
|
||||
for (String key : dbObject.keySet()) {
|
||||
result.put(key, createReferenceFor(dbObject.get(key), property));
|
||||
result.put(key, createDbRefFor(dbObject.get(key), property));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return createReferenceFor(source, property);
|
||||
return createDbRefFor(source, property);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -708,17 +690,12 @@ public class QueryMapper {
|
||||
return new AbstractMap.SimpleEntry<>(key, value);
|
||||
}
|
||||
|
||||
private Object createReferenceFor(Object source, MongoPersistentProperty property) {
|
||||
private DBRef createDbRefFor(Object source, MongoPersistentProperty property) {
|
||||
|
||||
if (source instanceof DBRef) {
|
||||
return (DBRef) source;
|
||||
}
|
||||
|
||||
if (property != null && (property.isDocumentReference()
|
||||
|| (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) {
|
||||
return converter.toDocumentPointer(source, property).getPointer();
|
||||
}
|
||||
|
||||
return converter.toDBRef(source, property);
|
||||
}
|
||||
|
||||
@@ -782,7 +759,7 @@ public class QueryMapper {
|
||||
|
||||
/**
|
||||
* Returns whether the given {@link String} is a MongoDB keyword. The default implementation will check against the
|
||||
* set of registered keywords.
|
||||
* set of registered keywords returned by {@link #getKeywords()}.
|
||||
*
|
||||
* @param candidate
|
||||
* @return
|
||||
@@ -1237,9 +1214,9 @@ public class QueryMapper {
|
||||
|
||||
String types = StringUtils.collectionToDelimitedString(
|
||||
path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> ");
|
||||
QueryMapper.LOGGER.info(String.format(
|
||||
"Could not map '%s'. Maybe a fragment in '%s' is considered a simple type. Mapper continues with %s.",
|
||||
path, types, pathExpression));
|
||||
QueryMapper.LOGGER.info(
|
||||
"Could not map '{}'. Maybe a fragment in '{}' is considered a simple type. Mapper continues with {}.",
|
||||
path, types, pathExpression);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@@ -1411,14 +1388,6 @@ public class QueryMapper {
|
||||
this.currentIndex = 0;
|
||||
}
|
||||
|
||||
String nextToken() {
|
||||
return pathParts.get(currentIndex + 1);
|
||||
}
|
||||
|
||||
boolean hasNexToken() {
|
||||
return pathParts.size() > currentIndex + 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps the property name while retaining potential positional operator {@literal $}.
|
||||
*
|
||||
@@ -1428,26 +1397,31 @@ public class QueryMapper {
|
||||
protected String mapPropertyName(MongoPersistentProperty property) {
|
||||
|
||||
StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property));
|
||||
if (!hasNexToken()) {
|
||||
return mappedName.toString();
|
||||
|
||||
boolean inspect = iterator.hasNext();
|
||||
|
||||
while (inspect) {
|
||||
|
||||
String partial = iterator.next();
|
||||
currentIndex++;
|
||||
|
||||
boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike() ;
|
||||
if(property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()){
|
||||
partial = iterator.next();
|
||||
currentIndex++;
|
||||
}
|
||||
|
||||
if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) {
|
||||
mappedName.append(".").append(partial);
|
||||
}
|
||||
|
||||
inspect = isPositional && iterator.hasNext();
|
||||
}
|
||||
|
||||
String nextToken = nextToken();
|
||||
if (isPositionalParameter(nextToken)) {
|
||||
|
||||
mappedName.append(".").append(nextToken);
|
||||
currentIndex += 2;
|
||||
return mappedName.toString();
|
||||
if(currentIndex + 1 < pathParts.size()) {
|
||||
currentIndex++;
|
||||
currentPropertyRoot = pathParts.get(currentIndex);
|
||||
}
|
||||
|
||||
if (property.isMap()) {
|
||||
|
||||
mappedName.append(".").append(nextToken);
|
||||
currentIndex += 2;
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
currentIndex++;
|
||||
return mappedName.toString();
|
||||
}
|
||||
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a
|
||||
* {@link ReferenceLoader.DocumentReferenceQuery}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ReferenceLoader {
|
||||
|
||||
/**
|
||||
* Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
@Nullable
|
||||
default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) {
|
||||
|
||||
Iterator<Document> it = fetchMany(referenceQuery, context).iterator();
|
||||
return it.hasNext() ? it.next() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain multiple {@link Document} matching the given {@literal referenceQuery} in the {@literal context}.
|
||||
*
|
||||
* @param referenceQuery must not be {@literal null}.
|
||||
* @param context must not be {@literal null}.
|
||||
* @return the matching {@link Document} or {@literal null} if none found.
|
||||
*/
|
||||
Iterable<Document> fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context);
|
||||
|
||||
/**
|
||||
* The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched
|
||||
* applying potentially given order criteria.
|
||||
*/
|
||||
interface DocumentReferenceQuery {
|
||||
|
||||
/**
|
||||
* Get the query to obtain matching {@link Document documents}.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
Bson getQuery();
|
||||
|
||||
/**
|
||||
* Get the sort criteria for ordering results.
|
||||
*
|
||||
* @return an empty {@link Document} by default. Never {@literal null}.
|
||||
*/
|
||||
default Bson getSort() {
|
||||
return new Document();
|
||||
}
|
||||
|
||||
default Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return restoreOrder(collection.find(getQuery()).sort(getSort()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore the order of fetched documents.
|
||||
*
|
||||
* @param documents must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
default Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
return documents;
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forSingleDocument(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static DocumentReferenceQuery forManyDocuments(Bson bson) {
|
||||
|
||||
return new DocumentReferenceQuery() {
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return bson;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
return collection.find(getQuery()).sort(getSort());
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,491 +0,0 @@
|
||||
/*
|
||||
* Copyright 2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.core.convert;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.SpELContext;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader;
|
||||
import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection;
|
||||
import org.springframework.data.mongodb.core.mapping.DocumentReference;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingContext;
|
||||
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
|
||||
import org.springframework.data.mongodb.util.json.ValueProvider;
|
||||
import org.springframework.data.mongodb.util.spel.ExpressionUtils;
|
||||
import org.springframework.data.util.Streamable;
|
||||
import org.springframework.expression.EvaluationContext;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.DBRef;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
|
||||
/**
|
||||
* A common delegate for {@link ReferenceResolver} implementations to resolve a reference to one/many target documents
|
||||
* that are converted to entities.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.3
|
||||
*/
|
||||
public final class ReferenceLookupDelegate {
|
||||
|
||||
private static final Document NO_RESULTS_PREDICATE = new Document("_id", new Document("$exists", false));
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final SpELContext spELContext;
|
||||
private final ParameterBindingDocumentCodec codec;
|
||||
|
||||
/**
|
||||
* Create a new {@link ReferenceLookupDelegate}.
|
||||
*
|
||||
* @param mappingContext must not be {@literal null}.
|
||||
* @param spELContext must not be {@literal null}.
|
||||
*/
|
||||
public ReferenceLookupDelegate(
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
SpELContext spELContext) {
|
||||
|
||||
Assert.notNull(mappingContext, "MappingContext must not be null");
|
||||
Assert.notNull(spELContext, "SpELContext must not be null");
|
||||
|
||||
this.mappingContext = mappingContext;
|
||||
this.spELContext = spELContext;
|
||||
this.codec = new ParameterBindingDocumentCodec();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the reference expressed by the given property.
|
||||
*
|
||||
* @param property the reference defining property. Must not be {@literal null}. THe
|
||||
* @param source the source value identifying to the referenced entity. Must not be {@literal null}.
|
||||
* @param lookupFunction to execute a lookup query. Must not be {@literal null}.
|
||||
* @param entityReader the callback to convert raw source values into actual domain types. Must not be
|
||||
* {@literal null}.
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@Nullable
|
||||
public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction,
|
||||
MongoEntityReader entityReader) {
|
||||
|
||||
Object value = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource()
|
||||
: source;
|
||||
|
||||
DocumentReferenceQuery filter = computeFilter(property, source, spELContext);
|
||||
ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext);
|
||||
|
||||
Iterable<Document> result = lookupFunction.apply(filter, referenceCollection);
|
||||
|
||||
if (property.isCollectionLike()) {
|
||||
return entityReader.read(result, property.getTypeInformation());
|
||||
}
|
||||
|
||||
if (!result.iterator().hasNext()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Object resultValue = result.iterator().next();
|
||||
return resultValue != null ? entityReader.read(resultValue, property.getTypeInformation()) : null;
|
||||
}
|
||||
|
||||
private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value,
|
||||
SpELContext spELContext) {
|
||||
|
||||
// Use the first value as a reference for others in case of collection like
|
||||
if (value instanceof Iterable) {
|
||||
|
||||
Iterator<?> iterator = ((Iterable<?>) value).iterator();
|
||||
value = iterator.hasNext() ? iterator.next() : new Document();
|
||||
}
|
||||
|
||||
// handle DBRef value
|
||||
if (value instanceof DBRef) {
|
||||
return ReferenceCollection.fromDBRef((DBRef) value);
|
||||
}
|
||||
|
||||
String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection();
|
||||
|
||||
if (value instanceof Document) {
|
||||
|
||||
Document documentPointer = (Document) value;
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
|
||||
ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
|
||||
DocumentReference documentReference = property.getDocumentReference();
|
||||
|
||||
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext,
|
||||
() -> documentPointer.get("db", String.class));
|
||||
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext,
|
||||
() -> documentPointer.get("collection", collection));
|
||||
return new ReferenceCollection(targetDatabase, targetCollection);
|
||||
}
|
||||
|
||||
return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection));
|
||||
}
|
||||
|
||||
if (property.isDocumentReference()) {
|
||||
|
||||
ParameterBindingContext bindingContext = bindingContext(property, value, spELContext);
|
||||
DocumentReference documentReference = property.getDocumentReference();
|
||||
|
||||
String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null);
|
||||
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection);
|
||||
|
||||
return new ReferenceCollection(targetDatabase, targetCollection);
|
||||
}
|
||||
|
||||
return new ReferenceCollection(null, collection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the given {@link ParameterBindingContext} to compute potential expressions against the value.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @param bindingContext must not be {@literal null}.
|
||||
* @param defaultValue
|
||||
* @param <T>
|
||||
* @return can be {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier<T> defaultValue) {
|
||||
|
||||
if (!StringUtils.hasText(value)) {
|
||||
return defaultValue.get();
|
||||
}
|
||||
|
||||
// parameter binding requires a document, since we do not have one, construct it.
|
||||
if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) {
|
||||
String s = "{ 'target-value' : " + value + "}";
|
||||
T evaluated = (T) codec.decode(s, bindingContext).get("target-value");
|
||||
return evaluated != null ? evaluated : defaultValue.get();
|
||||
}
|
||||
|
||||
if (BsonUtils.isJsonDocument(value)) {
|
||||
return (T) codec.decode(value, bindingContext);
|
||||
}
|
||||
|
||||
if (!value.startsWith("#") && ExpressionUtils.detectExpression(value) == null) {
|
||||
return (T) value;
|
||||
}
|
||||
|
||||
T evaluated = (T) bindingContext.evaluateExpression(value);
|
||||
return evaluated != null ? evaluated : defaultValue.get();
|
||||
}
|
||||
|
||||
ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source));
|
||||
|
||||
return new ParameterBindingContext(valueProvider, spELContext.getParser(),
|
||||
() -> evaluationContextFor(property, source, spELContext));
|
||||
}
|
||||
|
||||
ValueProvider valueProviderFor(Object source) {
|
||||
|
||||
return (index) -> {
|
||||
if (source instanceof Document) {
|
||||
return Streamable.of(((Document) source).values()).toList().get(index);
|
||||
}
|
||||
return source;
|
||||
};
|
||||
}
|
||||
|
||||
EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
Object target = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource()
|
||||
: source;
|
||||
|
||||
if (target == null) {
|
||||
target = new Document();
|
||||
}
|
||||
|
||||
EvaluationContext ctx = spELContext.getEvaluationContext(target);
|
||||
ctx.setVariable("target", target);
|
||||
ctx.setVariable("self", DocumentReferenceSource.getSelf(source));
|
||||
ctx.setVariable(property.getName(), target);
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the query to retrieve linked documents.
|
||||
*
|
||||
* @param property must not be {@literal null}.
|
||||
* @param source must not be {@literal null}.
|
||||
* @param spELContext must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) {
|
||||
|
||||
DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference()
|
||||
: ReferenceEmulatingDocumentReference.INSTANCE;
|
||||
|
||||
String lookup = documentReference.lookup();
|
||||
|
||||
Object value = DocumentReferenceSource.getTargetSource(source);
|
||||
|
||||
Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext),
|
||||
Document::new);
|
||||
|
||||
if (property.isCollectionLike() && (value instanceof Collection || value == null)) {
|
||||
|
||||
if (value == null) {
|
||||
return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)),
|
||||
sort);
|
||||
}
|
||||
|
||||
Collection<Object> objects = (Collection<Object>) value;
|
||||
|
||||
if (objects.isEmpty()) {
|
||||
return new ListDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort);
|
||||
}
|
||||
|
||||
List<Document> ors = new ArrayList<>(objects.size());
|
||||
for (Object entry : objects) {
|
||||
|
||||
Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext));
|
||||
ors.add(decoded);
|
||||
}
|
||||
|
||||
return new ListDocumentReferenceQuery(new Document("$or", ors), sort);
|
||||
}
|
||||
|
||||
if (property.isMap() && value instanceof Map) {
|
||||
|
||||
Set<Entry<Object, Object>> entries = ((Map<Object, Object>) value).entrySet();
|
||||
if (entries.isEmpty()) {
|
||||
return new MapDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort, Collections.emptyMap());
|
||||
}
|
||||
|
||||
Map<Object, Document> filterMap = new LinkedHashMap<>(entries.size());
|
||||
|
||||
for (Entry<Object, Object> entry : entries) {
|
||||
|
||||
Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext));
|
||||
filterMap.put(entry.getKey(), decoded);
|
||||
}
|
||||
|
||||
return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap);
|
||||
}
|
||||
|
||||
return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort);
|
||||
}
|
||||
|
||||
enum ReferenceEmulatingDocumentReference implements DocumentReference {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
@Override
|
||||
public Class<? extends Annotation> annotationType() {
|
||||
return DocumentReference.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String db() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String collection() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String lookup() {
|
||||
return "{ '_id' : ?#{#target} }";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String sort() {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean lazy() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation fetching a single {@link Document}.
|
||||
*/
|
||||
static class SingleDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public SingleDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> apply(MongoCollection<Document> collection) {
|
||||
|
||||
Document result = collection.find(getQuery()).sort(getSort()).limit(1).first();
|
||||
return result != null ? Collections.singleton(result) : Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
|
||||
* {@link Map} structure. Restores the original map order by matching individual query documents against the actual
|
||||
* values.
|
||||
*/
|
||||
static class MapDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
private final Map<Object, Document> filterOrderMap;
|
||||
|
||||
public MapDocumentReferenceQuery(Document query, Document sort, Map<Object, Document> filterOrderMap) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
this.filterOrderMap = filterOrderMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Bson getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
|
||||
Map<String, Object> targetMap = new LinkedHashMap<>();
|
||||
List<Document> collected = documents instanceof List ? (List<Document>) documents
|
||||
: Streamable.of(documents).toList();
|
||||
|
||||
for (Entry<Object, Document> filterMapping : filterOrderMap.entrySet()) {
|
||||
|
||||
Optional<Document> first = collected.stream()
|
||||
.filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst();
|
||||
|
||||
targetMap.put(filterMapping.getKey().toString(), first.orElse(null));
|
||||
}
|
||||
return Collections.singleton(new Document(targetMap));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a
|
||||
* {@link Collection} like structure. Restores the original order by matching individual query documents against the
|
||||
* actual values.
|
||||
*/
|
||||
static class ListDocumentReferenceQuery implements DocumentReferenceQuery {
|
||||
|
||||
private final Document query;
|
||||
private final Document sort;
|
||||
|
||||
public ListDocumentReferenceQuery(Document query, Document sort) {
|
||||
|
||||
this.query = query;
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterable<Document> restoreOrder(Iterable<Document> documents) {
|
||||
|
||||
List<Document> target = documents instanceof List ? (List<Document>) documents
|
||||
: Streamable.of(documents).toList();
|
||||
|
||||
if (!sort.isEmpty() || !query.containsKey("$or")) {
|
||||
return target;
|
||||
}
|
||||
|
||||
List<Document> ors = query.get("$or", List.class);
|
||||
return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
public Document getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document getSort() {
|
||||
return sort;
|
||||
}
|
||||
|
||||
int compareAgainstReferenceIndex(List<Document> referenceList, Document document1, Document document2) {
|
||||
|
||||
for (Document document : referenceList) {
|
||||
|
||||
Set<Entry<String, Object>> entries = document.entrySet();
|
||||
if (document1.entrySet().containsAll(entries)) {
|
||||
return -1;
|
||||
}
|
||||
if (document2.entrySet().containsAll(entries)) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
return referenceList.size();
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to
 * obtain raw results.
 */
@FunctionalInterface
interface LookupFunction {

	/**
	 * Execute the given {@link DocumentReferenceQuery} against the given {@link ReferenceCollection} and return the
	 * raw result {@link Document documents}.
	 *
	 * @param referenceQuery never {@literal null}.
	 * @param referenceCollection never {@literal null}.
	 * @return never {@literal null}.
	 */
	Iterable<Document> apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection);
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user