Compare commits
146 Commits
| SHA1 |
|---|
| 5b502a095f |
| 7552329598 |
| fa4f8bfe9f |
| 58fde292d5 |
| e71f8b34be |
| 152d84a200 |
| ddf77d55ff |
| 2acf1bc5d4 |
| 26f230036a |
| 3ee696dab2 |
| 7ee12e9637 |
| 414d716c07 |
| 5f9e25cd8a |
| caa2dd4e85 |
| 28fc1e4823 |
| d62639d11c |
| b13e47d8ac |
| 3743ca56df |
| 0f974130d1 |
| 020da42800 |
| 4307b4cd91 |
| af874b635c |
| 017e01810d |
| 05b46779cb |
| ba56d247c9 |
| 4484dac574 |
| af4b1788fc |
| af40f15a36 |
| 193b7de2d9 |
| f5204e859f |
| 16818324d8 |
| a9269c0086 |
| d3624b9af4 |
| 3711ed5b50 |
| c20feabada |
| f93e40fdee |
| dca508f3dc |
| 742448a281 |
| 1a4749e97c |
| a3cbe829b2 |
| 4ca6f29666 |
| df65644d59 |
| 05a169c858 |
| e49119038d |
| f2df84e8c4 |
| 1504fd0529 |
| f63610c925 |
| 06eb52cf13 |
| c9a4a8a370 |
| 8d17ddb92c |
| 0e96ef2d0c |
| 4d8b965176 |
| 37bb390447 |
| 5c79e1e4ef |
| 9b41d70f75 |
| b2e52ab28f |
| fed33444e9 |
| a4ad37703f |
| 42e95e9600 |
| ed894aa51a |
| 693ff04294 |
| b018b31e20 |
| bacb75d098 |
| ec0fe6f994 |
| d3d690b908 |
| b98b5125f1 |
| d68bd4b44a |
| a73ae9a1a5 |
| ac3c578e93 |
| 9872f8cb07 |
| 1b1ab2c495 |
| 6554274dde |
| 34f91acbfe |
| 8e7d508cfe |
| 04dd78cb36 |
| 52c554cfe1 |
| c1bb8c4ba5 |
| ed9a367bb8 |
| b88ce46239 |
| 42ec94d321 |
| 53f35e185f |
| e08bfd253c |
| caaafa275d |
| 1f082abc7f |
| df77fcc19d |
| 007b965673 |
| 1b9680cece |
| 13f1d21919 |
| 4c10bf30bc |
| 8f78d0e0d8 |
| 56115a263c |
| 66b809318a |
| bfff60d915 |
| 051e973226 |
| eb61629f10 |
| 85783e5354 |
| 433b012b91 |
| 8dca0049ca |
| 635f3b82be |
| e69c7e1134 |
| 1f94e74b75 |
| 98858e0f5f |
| 90f311de51 |
| 7f7015fd86 |
| ab9c5d73a0 |
| 13caa162db |
| d5d620d777 |
| 21d50f2a72 |
| 4912d62be6 |
| 2bf6f226d6 |
| 1a6f7e371a |
| 2ef5d795ce |
| e3d2f16202 |
| c44232ff39 |
| 385e911708 |
| 171d8b2b1e |
| 7ac7eefad6 |
| 7a39e94e4b |
| 25733664b3 |
| 81da10f499 |
| bde114ed19 |
| a2403f58ec |
| 875b8eda9f |
| fc0a021937 |
| a6aa174ff5 |
| 8b36617752 |
| 56e8799c22 |
| 88e60070d6 |
| 6c7039580f |
| fba003f215 |
| 1a3239554c |
| 37b541931d |
| 9038280f68 |
| 75935a2bdb |
| d9ca3d7eb3 |
| 04e77ad5ab |
| 0ab39a17a7 |
| 49a6f13797 |
| b0fd6f691b |
| b5778772d9 |
| 0f55fb305d |
| 5ae7547465 |
| cf4e04a30e |
| 89c1dc77d9 |
| a2c842b59b |
| 0cd0be9478 |
.github/PULL_REQUEST_TEMPLATE.md (vendored): 1 line changed
@@ -6,6 +6,7 @@ Make sure that:
-->

- [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc).
- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO).
- [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes.
- [ ] You submit test cases (unit or integration tests) that back your changes.
- [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only).
.github/workflows/project.yml (vendored): 47 lines (file removed)
@@ -1,47 +0,0 @@
# GitHub Actions to automate GitHub issues for Spring Data Project Management

name: Spring Data GitHub Issues

on:
  issues:
    types: [opened, edited, reopened]
  issue_comment:
    types: [created]
  pull_request_target:
    types: [opened, edited, reopened]

jobs:
  Inbox:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null
    steps:
      - name: Create or Update Issue Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Inbox'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Pull-Request:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null
    steps:
      - name: Create or Update Pull Request Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Review pending'
          project-location: 'spring-projects'
          issue-number: ${{ github.event.pull_request.number }}
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
  Feedback-Provided:
    runs-on: ubuntu-latest
    if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback')
    steps:
      - name: Update Project Card
        uses: peter-evans/create-or-update-project-card@v1.1.2
        with:
          project-name: 'Spring Data'
          column-name: 'Feedback provided'
          project-location: 'spring-projects'
          token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }}
.mvn/wrapper/maven-wrapper.properties (vendored): 3 lines changed
@@ -1,2 +1 @@
#Mon Oct 11 14:30:24 CEST 2021
distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.3/apache-maven-3.8.3-bin.zip
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip
CI.adoc: 2 lines changed
@@ -1,6 +1,6 @@
= Continuous Integration

image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/]
CODE_OF_CONDUCT.adoc: 27 lines (new file)
@@ -0,0 +1,27 @@
= Contributor Code of Conduct

As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing other's private information, such as physical or electronic addresses,
without explicit permission
* Other unethical or unprofessional conduct

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io.
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances.
Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.

This Code of Conduct is adapted from the https://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at https://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/].
@@ -1,3 +1,3 @@
= Spring Data contribution guidelines

You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here].
You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here].
Jenkinsfile (vendored): 108 lines changed
@@ -3,7 +3,7 @@ pipeline {

	triggers {
		pollSCM 'H/10 * * * *'
		upstream(upstreamProjects: "spring-data-commons/2.6.x", threshold: hudson.model.Result.SUCCESS)
		upstream(upstreamProjects: "spring-data-commons/2.3.x", threshold: hudson.model.Result.SUCCESS)
	}

	options {
@@ -14,22 +14,6 @@ pipeline {
	stages {
		stage("Docker images") {
			parallel {
				stage('Publish JDK 8 + MongoDB 5.0') {
					when {
						changeset "ci/openjdk8-mongodb-5.0/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-5.0.0", "ci/openjdk8-mongodb-5.0/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 8 + MongoDB 4.0') {
					when {
						changeset "ci/openjdk8-mongodb-4.0/**"
@@ -39,39 +23,39 @@ pipeline {

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0.23", "ci/openjdk8-mongodb-4.0/")
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.0", "ci/openjdk8-mongodb-4.0/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 8 + MongoDB 4.4') {
				stage('Publish JDK 8 + MongoDB 4.2') {
					when {
						changeset "ci/openjdk8-mongodb-4.4/**"
						changeset "ci/openjdk8-mongodb-4.2/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.4.4", "ci/openjdk8-mongodb-4.4/")
							def image = docker.build("springci/spring-data-openjdk8-with-mongodb-4.2.0", "ci/openjdk8-mongodb-4.2/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
						}
					}
				}
				stage('Publish JDK 16 + MongoDB 4.4') {
				stage('Publish JDK 14 + MongoDB 4.2') {
					when {
						changeset "ci/openjdk16-mongodb-4.4/**"
						changeset "ci/openjdk14-mongodb-4.2/**"
					}
					agent { label 'data' }
					options { timeout(time: 30, unit: 'MINUTES') }

					steps {
						script {
							def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/")
							def image = docker.build("springci/spring-data-openjdk14-with-mongodb-4.2.0", "ci/openjdk14-mongodb-4.2/")
							docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
								image.push()
							}
@@ -83,9 +67,8 @@ pipeline {

		stage("test: baseline (jdk8)") {
			when {
				beforeAgent(true)
				anyOf {
					branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
					branch '3.0.x'
					not { triggeredBy 'UpstreamCause' }
				}
			}
@@ -99,7 +82,7 @@ pipeline {
			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
							sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
							sh 'sleep 10'
@@ -114,9 +97,8 @@ pipeline {

		stage("Test other configurations") {
			when {
				beforeAgent(true)
				allOf {
					branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
					anyOf {
						branch '3.0.x'
						not { triggeredBy 'UpstreamCause' }
					}
				}
@@ -132,7 +114,7 @@ pipeline {
			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.0.23:latest').inside('-v $HOME:/tmp/jenkins-home') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
							sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
							sh 'sleep 10'
@@ -145,7 +127,7 @@ pipeline {
			}
		}

		stage("test: mongodb 4.4 (jdk8)") {
		stage("test: mongodb 4.2 (jdk8)") {
			agent {
				label 'data'
			}
@@ -156,7 +138,7 @@ pipeline {
			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
							sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
							sh 'sleep 10'
@@ -169,31 +151,7 @@ pipeline {
			}
		}

		stage("test: mongodb 5.0 (jdk8)") {
			agent {
				label 'data'
			}
			options { timeout(time: 30, unit: 'MINUTES') }
			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}
			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('springci/spring-data-openjdk8-with-mongodb-5.0.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
							sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
							sh 'sleep 10'
							sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"'
							sh 'sleep 15'
							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B'
						}
					}
				}
			}
		}

		stage("test: baseline (jdk16)") {
		stage("test: baseline (jdk14)") {
			agent {
				label 'data'
			}
@@ -204,7 +162,7 @@ pipeline {
			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') {
						docker.image('springci/spring-data-openjdk15-with-mongodb-4.2.0:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log'
							sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &'
							sh 'sleep 10'
@@ -221,9 +179,8 @@ pipeline {

		stage('Release to artifactory') {
			when {
				beforeAgent(true)
				anyOf {
					branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP")
					branch '3.0.x'
					not { triggeredBy 'UpstreamCause' }
				}
			}
@@ -253,6 +210,35 @@ pipeline {
				}
			}
		}

		stage('Publish documentation') {
			when {
				branch '3.0.x'
			}
			agent {
				label 'data'
			}
			options { timeout(time: 20, unit: 'MINUTES') }

			environment {
				ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c')
			}

			steps {
				script {
					docker.withRegistry('', 'hub.docker.com-springbuildmaster') {
						docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') {
							sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' +
								'-Dartifactory.server=https://repo.spring.io ' +
								"-Dartifactory.username=${ARTIFACTORY_USR} " +
								"-Dartifactory.password=${ARTIFACTORY_PSW} " +
								"-Dartifactory.distribution-repository=temp-private-local " +
								'-Dmaven.test.skip=true clean deploy -U -B'
						}
					}
				}
			}
		}
	}

	post {
README.adoc: 124 lines changed
@@ -1,6 +1,6 @@
image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start]

= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]
= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]]

The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services.

@@ -8,12 +8,10 @@ The Spring Data MongoDB project aims to provide a familiar and consistent Spring
The Spring Data MongoDB project provides integration with the MongoDB document database.
Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer.

[[code-of-conduct]]
== Code of Conduct

This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.
This project is governed by the link:CODE_OF_CONDUCT.adoc[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io.

[[getting-started]]
== Getting Started

Here is a quick teaser of an application using Spring Data Repositories in Java:
@@ -61,7 +59,6 @@ class ApplicationConfig extends AbstractMongoClientConfiguration {
}
----

[[maven-configuration]]
=== Maven configuration

Add the Maven dependency:
@@ -71,25 +68,24 @@ Add the Maven dependency:
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}</version>
  <version>${version}.RELEASE</version>
</dependency>
----

If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository
and declare the appropriate dependency version.
If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version.

[source,xml]
----
<dependency>
  <groupId>org.springframework.data</groupId>
  <artifactId>spring-data-mongodb</artifactId>
  <version>${version}-SNAPSHOT</version>
  <version>${version}.BUILD-SNAPSHOT</version>
</dependency>

<repository>
  <id>spring-snapshot</id>
  <id>spring-libs-snapshot</id>
  <name>Spring Snapshot Repository</name>
  <url>https://repo.spring.io/snapshot</url>
  <url>https://repo.spring.io/libs-snapshot</url>
</repository>
----

@@ -102,7 +98,7 @@ Some of the changes affect the initial setup configuration as well as compile/ru

.Changed XML Namespace Elements and Attributes:
|===
| Element / Attribute | 2.x | 3.x
Element / Attribute | 2.x | 3.x

| `<mongo:mongo-client />`
| Used to create a `com.mongodb.MongoClient`
@@ -120,7 +116,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead

.Removed XML Namespace Elements and Attributes:
|===
| Element / Attribute | Replacement in 3.x | Comment
Element / Attribute | Replacement in 3.x | Comment

| `<mongo:db-factory mongo-ref="..." />`
| `<mongo:db-factory mongo-client-ref="..." />`
@@ -137,7 +133,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead

.New XML Namespace Elements and Attributes:
|===
| Element | Comment
Element | Comment

| `<mongo:db-factory mongo-client-ref="..." />`
| Replacement for `<mongo:db-factory mongo-ref="..." />`
@@ -157,7 +153,7 @@ Use `<mongo:client-settings cluster-hosts="..." />` instead

.Java API changes
|===
| Type | Comment
Type | Comment

| `MongoClientFactoryBean`
| Creates `com.mongodb.client.MongoClient` instead of `com.mongodb.MongoClient` +
@@ -178,7 +174,7 @@ Uses `MongoClientSettings` instead of `MongoClientOptions`.

.Removed Java API:
|===
| 2.x | Replacement in 3.x | Comment
2.x | Replacement in 3.x | Comment

| `MongoClientOptionsFactoryBean`
| `MongoClientSettingsFactoryBean`
@@ -230,7 +226,6 @@ static class Config extends AbstractMongoClientConfiguration {
----
====

[[getting-help]]
== Getting Help

Having trouble with Spring Data? We’d love to help!
@@ -244,7 +239,6 @@ If you are just starting out with Spring, try one of the https://spring.io/guide
You can also chat with the community on https://gitter.im/spring-projects/spring-data[Gitter].
* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues].

[[reporting-issues]]
== Reporting Issues

Spring Data uses Github as issue tracking system to record bugs and feature requests.
@@ -255,85 +249,10 @@ If you want to raise an issue, please follow the recommendations below:
* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc.
* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++.

[[guides]]
== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

[[examples]]
== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

[[building-from-source]]
== Building from Source

You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io].
and accessible from Maven using the Maven configuration noted <<maven-configuration,above>>.

NOTE: Configuration for Gradle is similar to Maven.

The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io].
Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link]
to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=2.5.4&packaging=jar&jvmVersion=1.8&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link]
to build a reactive one.

However, if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper]
and minimally JDK 8 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]).

In order to build Spring Data MongoDB, first you will need to https://www.mongodb.com/try/download/community[download]
and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution].

Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to
your MongoDB installation (e.g. `MONGODB_HOME`).

To run the full test suite a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set] is required.

To run the MongoDB server enter the following command from a command-line:

[source,bash]
----
$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0
...
"msg":"Successfully connected to host"
----

Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_".

Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set
the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`).

You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started.
To initialize the replica set, start a mongo client:

[source,bash]
----
$ $MONGODB_HOME/bin/mongo
MongoDB server version: 5.0.0
...
----

Then enter the following command:

[source,bash]
----
mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] })
----

Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`.
In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation):

[source,bash]
----
$ ulimit -n 32768
----

You can use `ulimit -a` again to verify the `ulimit` on "_open files_" was set appropriately.

Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command:
You don’t need to build from source to use Spring Data (binaries in https://repo.spring.io[repo.spring.io]), but if you want to try out the latest and greatest, Spring Data can be easily built with the https://github.com/takari/maven-wrapper[maven wrapper].
You also need JDK 1.8.

[source,bash]
----
@@ -342,8 +261,7 @@ Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw

If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.5.0 or above].

_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign
the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._
_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular please sign the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._

=== Building reference documentation

@@ -356,7 +274,17 @@ Building the documentation builds also the project without running tests.

The generated documentation is available from `target/site/reference/html/index.html`.

[[license]]
== Guides

The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step:

* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories.
* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories.

== Examples

* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail.

== License

Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license].
ci/openjdk11-mongodb-4.2/Dockerfile: 15 lines (new file)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
@@ -1,18 +0,0 @@
FROM adoptopenjdk/openjdk11:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
ci/openjdk14-mongodb-4.2/Dockerfile: 15 lines (new file)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk14:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
@@ -1,18 +0,0 @@
FROM adoptopenjdk/openjdk16:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
@@ -4,15 +4,12 @@ ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN RUN set -eux; \
    sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.0.23 mongodb-org-server=4.0.23 mongodb-org-shell=4.0.23 mongodb-org-mongos=4.0.23 mongodb-org-tools=4.0.23 ; \
    apt-get install -y mongodb-org=4.0.14 mongodb-org-server=4.0.14 mongodb-org-shell=4.0.14 mongodb-org-mongos=4.0.14 mongodb-org-tools=4.0.14 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
ci/openjdk8-mongodb-4.2/Dockerfile: 15 lines (new file)
@@ -0,0 +1,15 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv e162f504a20cdf15827f718d4b7c549a058f8b6b ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.2.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    apt-get install -y mongodb-org=4.2.0 mongodb-org-server=4.2.0 mongodb-org-shell=4.2.0 mongodb-org-mongos=4.2.0 mongodb-org-tools=4.2.0 ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
@@ -1,20 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update ; \
    ln -T /bin/true /usr/bin/systemctl ; \
    apt-get install -y mongodb-org=4.4.4 mongodb-org-server=4.4.4 mongodb-org-shell=4.4.4 mongodb-org-mongos=4.4.4 mongodb-org-tools=4.4.4 ; \
    rm /usr/bin/systemctl ; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
@@ -1,20 +0,0 @@
FROM adoptopenjdk/openjdk8:latest

ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive

RUN set -eux; \
    sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \
    sed -i -e 's/http/https/g' /etc/apt/sources.list ; \
    apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \
    # MongoDB 5.0 release signing key
    apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \
    # Needed when MongoDB creates a 5.0 folder.
    echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \
    echo ${TZ} > /etc/timezone;

RUN apt-get update; \
    apt-get install -y mongodb-org=5.0.3 mongodb-org-server=5.0.3 mongodb-org-shell=5.0.3 mongodb-org-mongos=5.0.3 mongodb-org-tools=5.0.3; \
    apt-get clean; \
    rm -rf /var/lib/apt/lists/*;
pom.xml: 20 lines changed
@@ -5,7 +5,7 @@

	<groupId>org.springframework.data</groupId>
	<artifactId>spring-data-mongodb-parent</artifactId>
	<version>3.3.1</version>
	<version>3.0.10.BUILD-SNAPSHOT</version>
	<packaging>pom</packaging>

	<name>Spring Data MongoDB</name>
@@ -15,7 +15,7 @@
	<parent>
		<groupId>org.springframework.data.build</groupId>
		<artifactId>spring-data-parent</artifactId>
		<version>2.6.1</version>
		<version>2.3.10.BUILD-SNAPSHOT</version>
	</parent>

	<modules>
@@ -26,8 +26,8 @@
	<properties>
		<project.type>multi</project.type>
		<dist.id>spring-data-mongodb</dist.id>
		<springdata.commons>2.6.1</springdata.commons>
		<mongo>4.4.1</mongo>
		<springdata.commons>2.3.10.BUILD-SNAPSHOT</springdata.commons>
		<mongo>4.0.6</mongo>
		<mongo.reactivestreams>${mongo}</mongo.reactivestreams>
		<jmh.version>1.19</jmh.version>
	</properties>
@@ -134,18 +134,18 @@

	<repositories>
		<repository>
			<id>spring-libs-release</id>
			<url>https://repo.spring.io/libs-release</url>
			<id>spring-libs-snapshot</id>
			<url>https://repo.spring.io/libs-snapshot</url>
		</repository>
		<repository>
			<id>sonatype-libs-snapshot</id>
			<url>https://oss.sonatype.org/content/repositories/snapshots</url>
			<releases>
				<enabled>false</enabled>
			</releases>
				<enabled>false</enabled>
			</releases>
			<snapshots>
				<enabled>true</enabled>
			</snapshots>
				<enabled>true</enabled>
			</snapshots>
		</repository>
	</repositories>
@@ -7,7 +7,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
		<version>3.3.1</version>
		<version>3.0.10.BUILD-SNAPSHOT</version>
		<relativePath>../pom.xml</relativePath>
	</parent>
@@ -14,7 +14,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
		<version>3.3.1</version>
		<version>3.0.10.BUILD-SNAPSHOT</version>
		<relativePath>../pom.xml</relativePath>
	</parent>
@@ -11,7 +11,7 @@
	<parent>
		<groupId>org.springframework.data</groupId>
		<artifactId>spring-data-mongodb-parent</artifactId>
		<version>3.3.1</version>
		<version>3.0.10.BUILD-SNAPSHOT</version>
		<relativePath>../pom.xml</relativePath>
	</parent>
@@ -87,13 +87,6 @@
		<optional>true</optional>
	</dependency>

	<dependency>
		<groupId>com.google.code.findbugs</groupId>
		<artifactId>jsr305</artifactId>
		<version>3.0.2</version>
		<optional>true</optional>
	</dependency>

	<!-- reactive -->

	<dependency>
@@ -143,13 +136,6 @@
		<optional>true</optional>
	</dependency>

	<dependency>
		<groupId>io.reactivex.rxjava3</groupId>
		<artifactId>rxjava</artifactId>
		<version>${rxjava3}</version>
		<optional>true</optional>
	</dependency>

	<!-- CDI -->
	<!-- Dependency order required to build against CDI 1.0 and test with CDI 2.0 -->
	<dependency>
@@ -206,14 +192,7 @@
	<dependency>
		<groupId>org.hibernate</groupId>
		<artifactId>hibernate-validator</artifactId>
		<version>5.4.3.Final</version>
		<scope>test</scope>
	</dependency>

	<dependency>
		<groupId>org.glassfish</groupId>
		<artifactId>javax.el</artifactId>
		<version>3.0.1-b11</version>
		<version>5.2.4.Final</version>
		<scope>test</scope>
	</dependency>
@@ -317,15 +296,6 @@
		<scope>test</scope>
	</dependency>

	<!-- jMolecules -->

	<dependency>
		<groupId>org.jmolecules</groupId>
		<artifactId>jmolecules-ddd</artifactId>
		<version>${jmolecules}</version>
		<scope>test</scope>
	</dependency>

	</dependencies>

	<build>
@@ -1,152 +0,0 @@
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

import java.util.Arrays;

import org.bson.Document;
import org.bson.codecs.DocumentCodec;
import org.bson.codecs.configuration.CodecRegistry;
import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec;
import org.springframework.data.util.Lazy;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;

/**
 * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json})
 * expression. The expression will be wrapped within <code>{ ... }</code> if necessary. The actual parsing and parameter
 * binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via
 * {@link #toDocument()}.
 * <br />
 *
 * <pre class="code">
 * $toUpper : $name -> { '$toUpper' : '$name' }
 *
 * { '$toUpper' : '$name' } -> { '$toUpper' : '$name' }
 *
 * { '$toUpper' : '?0' }, "$name" -> { '$toUpper' : '$name' }
 * </pre>
 *
 * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry}
 * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}.
 *
 * @author Christoph Strobl
 * @since 3.2
 */
public class BindableMongoExpression implements MongoExpression {

	private final String expressionString;

	private final @Nullable CodecRegistryProvider codecRegistryProvider;

	private final @Nullable Object[] args;

	private final Lazy<Document> target;

	/**
	 * Create a new instance of {@link BindableMongoExpression}.
	 *
	 * @param expression must not be {@literal null}.
	 * @param args can be {@literal null}.
	 */
	public BindableMongoExpression(String expression, @Nullable Object[] args) {
		this(expression, null, args);
	}

	/**
	 * Create a new instance of {@link BindableMongoExpression}.
	 *
	 * @param expression must not be {@literal null}.
	 * @param codecRegistryProvider can be {@literal null}.
	 * @param args can be {@literal null}.
	 */
	public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider,
			@Nullable Object[] args) {

		this.expressionString = expression;
		this.codecRegistryProvider = codecRegistryProvider;
		this.args = args;
		this.target = Lazy.of(this::parse);
	}

	/**
	 * Provide the {@link CodecRegistry} used to convert expressions.
	 *
	 * @param codecRegistry must not be {@literal null}.
	 * @return new instance of {@link BindableMongoExpression}.
	 */
	public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) {
		return new BindableMongoExpression(expressionString, () -> codecRegistry, args);
	}

	/**
	 * Provide the arguments to bind to the placeholders via their index.
	 *
	 * @param args must not be {@literal null}.
	 * @return new instance of {@link BindableMongoExpression}.
	 */
	public BindableMongoExpression bind(Object... args) {
		return new BindableMongoExpression(expressionString, codecRegistryProvider, args);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.data.mongodb.MongoExpression#toDocument()
	 */
	@Override
	public Document toDocument() {
		return target.get();
	}

	/*
	 * (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args="
				+ Arrays.toString(args) + '}';
	}

	private Document parse() {

		String expression = wrapJsonIfNecessary(expressionString);

		if (ObjectUtils.isEmpty(args)) {

			if (codecRegistryProvider == null) {
				return Document.parse(expression);
			}

			return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class)
					.orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry())));
		}

		ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec()
				: new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry());
		return codec.decode(expression, args);
	}

	private static String wrapJsonIfNecessary(String json) {

		if (StringUtils.hasText(json) && (json.startsWith("{") && json.endsWith("}"))) {
			return json;
		}

		return "{" + json + "}";
	}
}
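The removed `BindableMongoExpression` above parses the raw JSON lazily and binds positional arguments on the first `toDocument()` call. As a usage illustration only (not part of this diff, and assuming the class is available as in the 3.2+ baseline), it could be exercised like this:

```java
import org.bson.Document;
import org.springframework.data.mongodb.BindableMongoExpression;

class BindableMongoExpressionSample {

	Document toUpperExpression() {

		// Placeholder ?0 is bound lazily; parsing only happens on the first toDocument() call.
		BindableMongoExpression expression = new BindableMongoExpression("{ '$toUpper' : '?0' }",
				new Object[] { "$name" });

		return expression.toDocument(); // yields { '$toUpper' : '$name' }
	}
}
```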
@@ -20,8 +20,8 @@ import org.springframework.util.StringUtils;

/**
 * Helper class featuring helper methods for working with MongoDb collections.
 * <br />
 * <br />
 * <p/>
 * <p/>
 * Mainly intended for internal use within the framework.
 *
 * @author Thomas Risberg
@@ -30,7 +30,7 @@ import com.mongodb.client.MongoDatabase;
 * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining
 * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and
 * {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
 * <br />
 * <p />
 * <strong>Note:</strong> Intended for internal usage only.
 *
 * @author Christoph Strobl
@@ -43,7 +43,7 @@ public class MongoDatabaseUtils {

	/**
	 * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using
	 * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
	 * <br />
	 * <p />
	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
	 * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
	 *
@@ -56,7 +56,7 @@

	/**
	 * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}.
	 * <br />
	 * <p />
	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
	 * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
	 *
@@ -71,7 +71,7 @@

	/**
	 * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using
	 * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
	 * <br />
	 * <p />
	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
	 * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
	 *
@@ -85,7 +85,7 @@

	/**
	 * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}.
	 * <br />
	 * <p />
	 * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current
	 * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
	 *
@@ -104,8 +104,7 @@

		Assert.notNull(factory, "Factory must not be null!");

		if (sessionSynchronization == SessionSynchronization.NEVER
				|| !TransactionSynchronizationManager.isSynchronizationActive()) {
		if (!TransactionSynchronizationManager.isSynchronizationActive()) {
			return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase();
		}
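For context only (not part of the diff): callers typically obtain the session-bound database through `MongoDatabaseUtils` rather than the factory directly, so an active transaction is honored. A rough sketch, assuming a configured `MongoDatabaseFactory`; the repository class and collection name are made up for the example:

```java
import com.mongodb.client.MongoDatabase;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoDatabaseUtils;
import org.springframework.transaction.annotation.Transactional;

class OrderRepository {

	private final MongoDatabaseFactory factory;

	OrderRepository(MongoDatabaseFactory factory) {
		this.factory = factory;
	}

	@Transactional
	void insertOrder(org.bson.Document order) {

		// Resolves the ClientSession-bound database when a transaction is active,
		// otherwise falls back to the plain database obtained from the factory.
		MongoDatabase database = MongoDatabaseUtils.getDatabase(factory);
		database.getCollection("orders").insertOne(order);
	}
}
```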
@@ -1,73 +0,0 @@
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb;

/**
 * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when
 * passed on to the driver.
 * <br />
 * A set of predefined {@link MongoExpression expressions}, including a
 * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method
 * like expressions (eg. {@code toUpper(name)}) are available via the
 * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}.
 *
 * @author Christoph Strobl
 * @since 3.2
 * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators
 * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators
 * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators
 * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators
 * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators
 * @see org.springframework.data.mongodb.core.aggregation.DateOperators
 * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators
 * @see org.springframework.data.mongodb.core.aggregation.SetOperators
 * @see org.springframework.data.mongodb.core.aggregation.StringOperators
 */
@FunctionalInterface
public interface MongoExpression {

	/**
	 * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}). <br />
	 * The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
	 * if necessary.
	 *
	 * @param expression must not be {@literal null}.
	 * @return new instance of {@link MongoExpression}.
	 */
	static MongoExpression create(String expression) {
		return new BindableMongoExpression(expression, null);
	}

	/**
	 * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0})
	 * that will be resolved on first call of {@link #toDocument()}. <br />
	 * The given expression will be wrapped with <code>{ ... }</code> to match an actual MongoDB {@link org.bson.Document}
	 * if necessary.
	 *
	 * @param expression must not be {@literal null}.
	 * @return new instance of {@link MongoExpression}.
	 */
	static MongoExpression create(String expression, Object... args) {
		return new BindableMongoExpression(expression, args);
	}

	/**
	 * Obtain the native {@link org.bson.Document} representation.
	 *
	 * @return never {@literal null}.
	 */
	org.bson.Document toDocument();
}
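A short usage sketch of the interface above; the expression strings are illustrative:

```java
import org.bson.Document;

import org.springframework.data.mongodb.MongoExpression;

class MongoExpressionSketch {

	void demo() {

		// Plain expression; the wrapping { ... } braces are added on demand.
		MongoExpression toUpper = MongoExpression.create("$toUpper : $name");

		// Placeholder variant; ?0 is bound when toDocument() is first called.
		MongoExpression concat = MongoExpression.create("$concat : [ '$lastname', ?0, '$firstname' ]", ", ");

		Document document = concat.toDocument(); // native representation handed to the driver
	}
}
```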
@@ -24,7 +24,7 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}.
|
||||
* {@link MongoTransactionManager} binds instances of this class to the thread.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
|
||||
@@ -37,18 +37,18 @@ import com.mongodb.client.ClientSession;
|
||||
/**
|
||||
* A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages
|
||||
* {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal
|
||||
* consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction()
|
||||
* commit} or {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via
|
||||
* {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard
|
||||
* {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as
|
||||
* {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override
|
||||
* {@link #doCommit(MongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager

/**
 * Create a new {@link MongoTransactionManager} for bean-style usage.
 * <br />
 * <p />
 * <strong>Note:</strong>The {@link MongoDatabaseFactory db factory} has to be
 * {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a
 * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
 * <br />
 * <p />
 * Optionally it is possible to set default {@link TransactionOptions transaction options} defining
 * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
 *
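A typical bean-style setup matching the constructor notes above; bean and variable names are illustrative:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;

@Configuration
class TransactionConfig {

	@Bean
	MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {
		// Alternative to the no-arg constructor plus setDbFactory(...) described in the Javadoc.
		return new MongoTransactionManager(dbFactory);
	}
}
```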
@@ -212,8 +212,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
 * By default those labels are ignored, nevertheless one might check for
 * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the
 * commit. <br />
 * <pre>
 * <code>
 * <pre>
 * int retries = 3;
 * do {
 *     try {

@@ -226,8 +226,8 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager
 *     }
 *     Thread.sleep(500);
 * } while (--retries > 0);
 * </pre>
 * </code>
 * </pre>
 *
 * @param transactionObject never {@literal null}.
 * @throws Exception in case of transaction errors.
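Completing the retry fragment quoted above, a hedged sketch of such a `doCommit` override might look as follows; the subclass name is illustrative:

```java
import com.mongodb.MongoException;

import org.springframework.data.mongodb.MongoDatabaseFactory;
import org.springframework.data.mongodb.MongoTransactionManager;

class RetryingMongoTransactionManager extends MongoTransactionManager {

	RetryingMongoTransactionManager(MongoDatabaseFactory dbFactory) {
		super(dbFactory);
	}

	@Override
	protected void doCommit(MongoTransactionObject transactionObject) throws Exception {

		int retries = 3;
		do {
			try {
				transactionObject.commitTransaction();
				break;
			} catch (MongoException ex) {
				// Only retry commits that carry the transient commit error label.
				if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
					throw ex;
				}
			}
			Thread.sleep(500);
		} while (--retries > 0);
	}
}
```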
@@ -36,7 +36,7 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
|
||||
* Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for
|
||||
* obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection}
|
||||
* suitable for transactional usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
@@ -75,7 +75,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using
|
||||
* {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -88,7 +88,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
/**
|
||||
* Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -104,7 +104,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -119,7 +119,7 @@ public class ReactiveMongoDatabaseUtils {
|
||||
/**
|
||||
* Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory
|
||||
* factory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber
|
||||
* {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}.
|
||||
*
|
||||
@@ -138,10 +138,6 @@ public class ReactiveMongoDatabaseUtils {
|
||||
|
||||
Assert.notNull(factory, "DatabaseFactory must not be null!");
|
||||
|
||||
if (sessionSynchronization == SessionSynchronization.NEVER) {
|
||||
return getMongoDatabaseOrDefault(dbName, factory);
|
||||
}
|
||||
|
||||
return TransactionSynchronizationManager.forCurrentTransaction()
|
||||
.filter(TransactionSynchronizationManager::isSynchronizationActive) //
|
||||
.flatMap(synchronizationManager -> {
|
||||
|
||||
@@ -24,7 +24,7 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
/**
|
||||
* MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds
|
||||
* instances of this class to the subscriber context.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> Intended for internal usage only.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -38,21 +38,21 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
* A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Binds a {@link ClientSession} from the specified
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber
|
||||
* {@link reactor.util.context.Context}.
|
||||
* <br />
|
||||
* <p />
|
||||
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a
|
||||
* {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start},
|
||||
* {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or
|
||||
* {@link ClientSession#abortTransaction() abort} a transaction.
|
||||
* <br />
|
||||
* <p />
|
||||
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via
|
||||
* {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead
|
||||
* of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring
|
||||
* classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly.
|
||||
* <br />
|
||||
* <p />
|
||||
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override
|
||||
* {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the
|
||||
* <a href="https://docs.mongodb.com/manual/core/transactions/#retry-commit-operation">Retry Commit Operation</a>
|
||||
@@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction
|
||||
|
||||
/**
|
||||
* Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong>The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to
|
||||
* be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor
|
||||
* to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining
|
||||
* {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}.
|
||||
*
|
||||
|
||||
@@ -35,7 +35,7 @@ import com.mongodb.session.ClientSession;
|
||||
/**
|
||||
* {@link MethodInterceptor} implementation looking up and invoking an alternative target method having
|
||||
* {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
|
||||
* <br />
|
||||
* <p />
|
||||
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself
|
||||
* like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them
|
||||
* if not already proxied.
|
||||
|
||||
@@ -15,20 +15,13 @@
 */
package org.springframework.data.mongodb;

import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

/**
 * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to
 * participate if any.
 * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to
 * define in which type of transactions to participate if any.
 *
 * @author Christoph Strobl
 * @author Mark Paluch
 * @since 2.1
 * @see MongoTemplate#setSessionSynchronization(SessionSynchronization)
 * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization)
 * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization)
 * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization)
 */
public enum SessionSynchronization {

@@ -41,12 +34,5 @@ public enum SessionSynchronization {

	/**
	 * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}.
	 */
	ON_ACTUAL_TRANSACTION,

	/**
	 * Do not participate in ongoing transactions.
	 *
	 * @since 3.2.5
	 */
	NEVER;
	ON_ACTUAL_TRANSACTION;
}
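To illustrate the enum above, the synchronization mode is set on the template; the `NEVER` constant assumes a version that ships it (marked `@since 3.2.5` above):

```java
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.MongoTemplate;

class SessionSynchronizationSketch {

	void configure(MongoTemplate template) {
		// Opt out of participating in ongoing transactions; ON_ACTUAL_TRANSACTION restores the default behaviour.
		template.setSessionSynchronization(SessionSynchronization.NEVER);
	}
}
```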
@@ -48,7 +48,7 @@ public class SpringDataMongoDB {
/**
 * Fetches the "Implementation-Version" manifest attribute from the jar file.
 * <br />
 * <p />
 * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the
 * version in all environments. In this case the current Major version is returned as a fallback.
 *

@@ -61,8 +61,8 @@ public @interface EnableMongoAuditing {

	boolean modifyOnCreate() default true;

	/**
	 * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting
	 * creation and modification dates.
	 * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be
	 * used for setting creation and modification dates.
	 *
	 * @return empty {@link String} by default.
	 */
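A minimal auditing configuration wiring the attribute described above; bean names and the clock source are illustrative:

```java
import java.time.OffsetDateTime;
import java.util.Optional;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.auditing.DateTimeProvider;
import org.springframework.data.mongodb.config.EnableMongoAuditing;

@Configuration
@EnableMongoAuditing(dateTimeProviderRef = "auditingDateTimeProvider")
class AuditingConfig {

	@Bean
	DateTimeProvider auditingDateTimeProvider() {
		// Supplies the timestamp written to @CreatedDate / @LastModifiedDate properties.
		return () -> Optional.of(OffsetDateTime.now());
	}
}
```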
@@ -1,70 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.lang.annotation.Documented;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Inherited;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.data.auditing.DateTimeProvider;
|
||||
import org.springframework.data.domain.ReactiveAuditorAware;
|
||||
|
||||
/**
|
||||
* Annotation to enable auditing in MongoDB using reactive infrastructure via annotation configuration.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.1
|
||||
*/
|
||||
@Inherited
|
||||
@Documented
|
||||
@Target(ElementType.TYPE)
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Import(ReactiveMongoAuditingRegistrar.class)
|
||||
public @interface EnableReactiveMongoAuditing {
|
||||
|
||||
/**
|
||||
* Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal.
|
||||
*
|
||||
* @return empty {@link String} by default.
|
||||
*/
|
||||
String auditorAwareRef() default "";
|
||||
|
||||
/**
|
||||
* Configures whether the creation and modification dates are set. Defaults to {@literal true}.
|
||||
*
|
||||
* @return {@literal true} by default.
|
||||
*/
|
||||
boolean setDates() default true;
|
||||
|
||||
/**
|
||||
* Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}.
|
||||
*
|
||||
* @return {@literal true} by default.
|
||||
*/
|
||||
boolean modifyOnCreate() default true;
|
||||
|
||||
/**
|
||||
* Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting
|
||||
* creation and modification dates.
|
||||
*
|
||||
* @return empty {@link String} by default.
|
||||
*/
|
||||
String dateTimeProviderRef() default "";
|
||||
}
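For context, a sketch that enables the annotation above together with a principal lookup; the auditor value is illustrative:

```java
import reactor.core.publisher.Mono;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.domain.ReactiveAuditorAware;
import org.springframework.data.mongodb.config.EnableReactiveMongoAuditing;

@Configuration
@EnableReactiveMongoAuditing(auditorAwareRef = "reactiveAuditorAware")
class ReactiveAuditingConfig {

	@Bean
	ReactiveAuditorAware<String> reactiveAuditorAware() {
		// A real application would derive the auditor from the security context.
		return () -> Mono.just("system");
	}
}
```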
@@ -17,6 +17,7 @@ package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
@@ -27,8 +28,14 @@ import org.springframework.data.auditing.IsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableMongoAuditing} annotation.
|
||||
@@ -39,6 +46,9 @@ import org.springframework.util.Assert;
|
||||
*/
|
||||
class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono",
|
||||
MongoAuditingRegistrar.class.getClassLoader());
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
@@ -81,7 +91,7 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class);
|
||||
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(MongoMappingContextLookup.class);
|
||||
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
|
||||
|
||||
builder.addConstructorArgValue(definition.getBeanDefinition());
|
||||
@@ -106,6 +116,68 @@ class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(),
|
||||
AuditingEntityCallback.class.getName(), registry);
|
||||
|
||||
if (PROJECT_REACTOR_AVAILABLE) {
|
||||
registerReactiveAuditingEntityCallback(registry, auditingHandlerDefinition.getSource());
|
||||
}
|
||||
}
|
||||
|
||||
private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, Object source) {
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(source);
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple helper to be able to wire the {@link MappingContext} from a {@link MappingMongoConverter} bean available in
|
||||
* the application context.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
*/
|
||||
static class MongoMappingContextLookup
|
||||
implements FactoryBean<MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty>> {
|
||||
|
||||
private final MappingMongoConverter converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link MongoMappingContextLookup} for the given {@link MappingMongoConverter}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public MongoMappingContextLookup(MappingMongoConverter converter) {
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getObject() throws Exception {
|
||||
return converter.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return MappingContext.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#isSingleton()
|
||||
*/
|
||||
@Override
|
||||
public boolean isSingleton() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.data.annotation.Persistent;
import org.springframework.data.convert.CustomConversions;
import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy;
import org.springframework.data.mapping.model.FieldNamingStrategy;

@@ -139,7 +140,8 @@ public abstract class MongoConfigurationSupport {
	}

	/**
	 * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}.
	 * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and
	 * {@link Persistent}.
	 *
	 * @param basePackage must not be {@literal null}.
	 * @return

@@ -159,6 +161,7 @@ public abstract class MongoConfigurationSupport {
	ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(
			false);
	componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class));
	componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class));

	for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) {

@@ -172,7 +175,8 @@ public abstract class MongoConfigurationSupport {

	/**
	 * Configures whether to abbreviate field names for domain objects by configuring a
	 * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created.
	 * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced
	 * customization needs, consider overriding {@link #mappingMongoConverter()}.
	 *
	 * @return
	 */
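The scanning described above is driven by the mapping base packages; a hedged sketch, assuming the usual `AbstractMongoClientConfiguration` entry point and an illustrative package name:

```java
import java.util.Collection;
import java.util.Collections;

import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration;

@Configuration
class MongoConfig extends AbstractMongoClientConfiguration {

	@Override
	protected String getDatabaseName() {
		return "example"; // illustrative database name
	}

	@Override
	protected Collection<String> getMappingBasePackages() {
		// Packages scanned on startup for @Document (and @Persistent) annotated entities.
		return Collections.singleton("com.example.domain");
	}
}
```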
@@ -22,12 +22,9 @@ import java.util.Map;
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.config.CustomEditorConfigurer;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionValidationException;
|
||||
import org.springframework.beans.factory.support.ManagedMap;
|
||||
import org.springframework.beans.factory.xml.BeanDefinitionParser;
|
||||
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;
|
||||
import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;
|
||||
import org.springframework.util.StringUtils;
|
||||
import org.springframework.util.xml.DomUtils;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
@@ -115,20 +112,6 @@ abstract class MongoParsingUtils {
|
||||
// Field level encryption
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings");
|
||||
|
||||
// ServerAPI
|
||||
if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) {
|
||||
|
||||
MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean();
|
||||
serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version"));
|
||||
try {
|
||||
clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject());
|
||||
} catch (Exception exception) {
|
||||
throw new BeanDefinitionValidationException("Non parsable server-api.", exception);
|
||||
}
|
||||
} else {
|
||||
setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi");
|
||||
}
|
||||
|
||||
// and the rest
|
||||
|
||||
mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition());
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import org.springframework.beans.factory.FactoryBean;
|
||||
import org.springframework.data.mapping.context.PersistentEntities;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
|
||||
/**
|
||||
* Simple helper to be able to wire the {@link PersistentEntities} from a {@link MappingMongoConverter} bean available
|
||||
* in the application context.
|
||||
*
|
||||
* @author Oliver Gierke
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @since 3.1
|
||||
*/
|
||||
public class PersistentEntitiesFactoryBean implements FactoryBean<PersistentEntities> {
|
||||
|
||||
private final MappingMongoConverter converter;
|
||||
|
||||
/**
|
||||
* Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link MappingMongoConverter}.
|
||||
*
|
||||
* @param converter must not be {@literal null}.
|
||||
*/
|
||||
public PersistentEntitiesFactoryBean(MappingMongoConverter converter) {
|
||||
this.converter = converter;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObject()
|
||||
*/
|
||||
@Override
|
||||
public PersistentEntities getObject() {
|
||||
return PersistentEntities.of(converter.getMappingContext());
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.beans.factory.FactoryBean#getObjectType()
|
||||
*/
|
||||
@Override
|
||||
public Class<?> getObjectType() {
|
||||
return PersistentEntities.class;
|
||||
}
|
||||
}
|
||||
@@ -1,97 +0,0 @@
|
||||
/*
|
||||
* Copyright 2020-2021 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* https://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.springframework.data.mongodb.config;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.springframework.beans.factory.config.BeanDefinition;
|
||||
import org.springframework.beans.factory.support.AbstractBeanDefinition;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
|
||||
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
|
||||
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
|
||||
import org.springframework.core.type.AnnotationMetadata;
|
||||
import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler;
|
||||
import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport;
|
||||
import org.springframework.data.auditing.config.AuditingConfiguration;
|
||||
import org.springframework.data.config.ParsingUtils;
|
||||
import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
/**
|
||||
* {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation.
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @since 3.1
|
||||
*/
|
||||
class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport {
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation()
|
||||
*/
|
||||
@Override
|
||||
protected Class<? extends Annotation> getAnnotation() {
|
||||
return EnableReactiveMongoAuditing.class;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName()
|
||||
*/
|
||||
@Override
|
||||
protected String getAuditingHandlerBeanName() {
|
||||
return "reactiveMongoAuditingHandler";
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration)
|
||||
*/
|
||||
@Override
|
||||
protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) {
|
||||
|
||||
Assert.notNull(configuration, "AuditingConfiguration must not be null!");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class);
|
||||
|
||||
BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(PersistentEntitiesFactoryBean.class);
|
||||
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR);
|
||||
|
||||
builder.addConstructorArgValue(definition.getBeanDefinition());
|
||||
return configureDefaultAuditHandlerAttributes(configuration, builder);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry)
|
||||
*/
|
||||
@Override
|
||||
protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition,
|
||||
BeanDefinitionRegistry registry) {
|
||||
|
||||
Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!");
|
||||
Assert.notNull(registry, "BeanDefinitionRegistry must not be null!");
|
||||
|
||||
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class);
|
||||
|
||||
builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry));
|
||||
builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource());
|
||||
|
||||
registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(),
|
||||
registry);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -27,9 +29,7 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping;
|
||||
import org.springframework.data.mongodb.core.aggregation.CountOperation;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
@@ -37,7 +37,6 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.query.CriteriaDefinition;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -50,50 +49,34 @@ import org.springframework.util.ObjectUtils;
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@AllArgsConstructor
|
||||
class AggregationUtil {
|
||||
|
||||
QueryMapper queryMapper;
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
Lazy<AggregationOperationContext> untypedMappingContext;
|
||||
|
||||
AggregationUtil(QueryMapper queryMapper,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
/**
|
||||
* Prepare the {@link AggregationOperationContext} for a given aggregation by either returning the context itself it
|
||||
* is not {@literal null}, create a {@link TypeBasedAggregationOperationContext} if the aggregation contains type
|
||||
* information (is a {@link TypedAggregation}) or use the {@link Aggregation#DEFAULT_CONTEXT}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param context can be {@literal null}.
|
||||
* @return the root {@link AggregationOperationContext} to use.
|
||||
*/
|
||||
AggregationOperationContext prepareAggregationContext(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext context) {
|
||||
|
||||
this.queryMapper = queryMapper;
|
||||
this.mappingContext = mappingContext;
|
||||
this.untypedMappingContext = Lazy
|
||||
.of(() -> new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper));
|
||||
}
|
||||
|
||||
AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
|
||||
DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping();
|
||||
|
||||
if (domainTypeMapping == DomainTypeMapping.NONE) {
|
||||
return Aggregation.DEFAULT_CONTEXT;
|
||||
if (context != null) {
|
||||
return context;
|
||||
}
|
||||
|
||||
if (!(aggregation instanceof TypedAggregation)) {
|
||||
|
||||
if(inputType == null) {
|
||||
return untypedMappingContext.get();
|
||||
}
|
||||
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
return new TypeBasedAggregationOperationContext(((TypedAggregation) aggregation).getInputType(), mappingContext,
|
||||
queryMapper);
|
||||
}
|
||||
|
||||
inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
if (domainTypeMapping == DomainTypeMapping.STRICT
|
||||
&& !aggregation.getPipeline().containsUnionWith()) {
|
||||
return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
}
|
||||
|
||||
return new RelaxedTypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper);
|
||||
return Aggregation.DEFAULT_CONTEXT;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -105,7 +88,7 @@ class AggregationUtil {
|
||||
*/
|
||||
List<Document> createPipeline(Aggregation aggregation, AggregationOperationContext context) {
|
||||
|
||||
if (ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
|
||||
if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) {
|
||||
return aggregation.toPipeline(context);
|
||||
}
|
||||
|
||||
@@ -132,6 +115,53 @@ class AggregationUtil {
|
||||
return command;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}.
|
||||
*
|
||||
* @param query must not be {@literal null}.
|
||||
* @param entityClass can be {@literal null} if the {@link Query} object is empty.
|
||||
* @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation.
|
||||
*/
|
||||
Aggregation createCountAggregation(Query query, @Nullable Class<?> entityClass) {
|
||||
|
||||
List<AggregationOperation> pipeline = computeCountAggregationPipeline(query, entityClass);
|
||||
|
||||
Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline)
|
||||
: Aggregation.newAggregation(pipeline);
|
||||
aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build());
|
||||
|
||||
return aggregation;
|
||||
}
|
||||
|
||||
private List<AggregationOperation> computeCountAggregationPipeline(Query query, @Nullable Class<?> entityType) {
|
||||
|
||||
CountOperation count = Aggregation.count().as("totalEntityCount");
|
||||
if (query.getQueryObject().isEmpty()) {
|
||||
return Collections.singletonList(count);
|
||||
}
|
||||
|
||||
Assert.notNull(entityType, "Entity type must not be null!");
|
||||
|
||||
Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(),
|
||||
mappingContext.getPersistentEntity(entityType));
|
||||
|
||||
CriteriaDefinition criteria = new CriteriaDefinition() {
|
||||
|
||||
@Override
|
||||
public Document getCriteriaObject() {
|
||||
return mappedQuery;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getKey() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
return Arrays.asList(Aggregation.match(criteria), count);
|
||||
}
|
||||
|
||||
private List<Document> mapAggregationPipeline(List<Document> pipeline) {
|
||||
|
||||
return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty()))
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
|
||||
|
||||
@@ -25,7 +27,6 @@ import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.messaging.Message;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.client.model.changestream.ChangeStreamDocument;
|
||||
import com.mongodb.client.model.changestream.OperationType;
|
||||
@@ -38,6 +39,7 @@ import com.mongodb.client.model.changestream.OperationType;
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@EqualsAndHashCode
|
||||
public class ChangeStreamEvent<T> {
|
||||
|
||||
@SuppressWarnings("rawtypes") //
|
||||
@@ -185,8 +187,8 @@ public class ChangeStreamEvent<T> {
|
||||
return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this);
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException(
|
||||
String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType));
|
||||
throw new IllegalArgumentException(String.format("No converter found capable of converting %s to %s",
|
||||
fullDocument.getClass(), targetType));
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -197,27 +199,4 @@ public class ChangeStreamEvent<T> {
|
||||
public String toString() {
|
||||
return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ChangeStreamEvent<?> that = (ChangeStreamEvent<?>) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.targetType, that.targetType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = raw != null ? raw.hashCode() : 0;
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(targetType);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
import java.time.Instant;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
@@ -23,6 +25,7 @@ import org.bson.BsonDocument;
|
||||
import org.bson.BsonTimestamp;
|
||||
import org.bson.BsonValue;
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -42,6 +45,7 @@ import com.mongodb.client.model.changestream.FullDocument;
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
*/
|
||||
@EqualsAndHashCode
|
||||
public class ChangeStreamOptions {
|
||||
|
||||
private @Nullable Object filter;
|
||||
@@ -152,44 +156,6 @@ public class ChangeStreamOptions {
|
||||
+ ObjectUtils.nullSafeClassName(timestamp));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ChangeStreamOptions that = (ChangeStreamOptions) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.filter, that.filter)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.resumeToken, that.resumeToken)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.resumeTimestamp, that.resumeTimestamp)) {
|
||||
return false;
|
||||
}
|
||||
return resume == that.resume;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(filter);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(collation);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(resume);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -242,13 +208,13 @@ public class ChangeStreamOptions {
/**
 * Set the filter to apply.
 * <br />
 * <p/>
 * Fields on aggregation expression root level are prefixed to map to fields contained in
 * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns},
 * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken
 * as given, during the mapping procedure. You may want to have a look at the
 * <a href="https://docs.mongodb.com/manual/reference/change-events/">structure of Change Events</a>.
 * <br />
 * <p/>
 * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are
 * mapped to domain type fields.
 *
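To illustrate the filter semantics above, a sketch using the options builder; the matched operation type is illustrative:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ChangeStreamOptions;

class ChangeStreamFilterSketch {

	ChangeStreamOptions insertsOnly() {
		// 'operationType' is one of the reserved fields that is not prefixed with 'fullDocument.'.
		return ChangeStreamOptions.builder()
				.filter(newAggregation(match(where("operationType").is("insert"))))
				.build();
	}
}
```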
@@ -15,13 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.timeseries.GranularityDefinition;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -45,7 +44,6 @@ public class CollectionOptions {
|
||||
private @Nullable Boolean capped;
|
||||
private @Nullable Collation collation;
|
||||
private ValidationOptions validationOptions;
|
||||
private @Nullable TimeSeriesOptions timeSeriesOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>CollectionOptions</code> instance.
|
||||
@@ -58,19 +56,17 @@ public class CollectionOptions {
|
||||
*/
|
||||
@Deprecated
|
||||
public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) {
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none(), null);
|
||||
this(size, maxDocuments, capped, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped,
|
||||
@Nullable Collation collation, ValidationOptions validationOptions,
|
||||
@Nullable TimeSeriesOptions timeSeriesOptions) {
|
||||
@Nullable Collation collation, ValidationOptions validationOptions) {
|
||||
|
||||
this.maxDocuments = maxDocuments;
|
||||
this.size = size;
|
||||
this.capped = capped;
|
||||
this.collation = collation;
|
||||
this.validationOptions = validationOptions;
|
||||
this.timeSeriesOptions = timeSeriesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -84,7 +80,7 @@ public class CollectionOptions {
|
||||
|
||||
Assert.notNull(collation, "Collation must not be null!");
|
||||
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null);
|
||||
return new CollectionOptions(null, null, null, collation, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -94,21 +90,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public static CollectionOptions empty() {
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use
|
||||
* {@link #timeSeries(TimeSeriesOptions)}.
|
||||
*
|
||||
* @param timeField The name of the property which contains the date in each time series document. Must not be
|
||||
* {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @see #timeSeries(TimeSeriesOptions)
|
||||
* @since 3.3
|
||||
*/
|
||||
public static CollectionOptions timeSeries(String timeField) {
|
||||
return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField));
|
||||
return new CollectionOptions(null, null, null, null, ValidationOptions.none());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -119,7 +101,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions capped() {
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null);
|
||||
return new CollectionOptions(size, maxDocuments, true, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,7 +112,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions maxDocuments(long maxDocuments) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -141,7 +123,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions size(long size) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,7 +134,7 @@ public class CollectionOptions {
|
||||
* @since 2.0
|
||||
*/
|
||||
public CollectionOptions collation(@Nullable Collation collation) {
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -272,20 +254,7 @@ public class CollectionOptions {
|
||||
public CollectionOptions validation(ValidationOptions validationOptions) {
|
||||
|
||||
Assert.notNull(validationOptions, "ValidationOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}.
|
||||
*
|
||||
* @param timeSeriesOptions must not be {@literal null}.
|
||||
* @return new instance of {@link CollectionOptions}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) {
|
||||
|
||||
Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!");
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions);
|
||||
return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -336,16 +305,6 @@ public class CollectionOptions {
|
||||
return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the {@link TimeSeriesOptions} if available.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not specified.
|
||||
* @since 3.3
|
||||
*/
|
||||
public Optional<TimeSeriesOptions> getTimeSeriesOptions() {
|
||||
return Optional.ofNullable(timeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encapsulation of ValidationOptions options.
|
||||
*
|
||||
@@ -353,6 +312,7 @@ public class CollectionOptions {
|
||||
* @author Andreas Zink
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
public static class ValidationOptions {
|
||||
|
||||
private static final ValidationOptions NONE = new ValidationOptions(null, null, null);
|
||||
@@ -361,13 +321,6 @@ public class CollectionOptions {
|
||||
private final @Nullable ValidationLevel validationLevel;
|
||||
private final @Nullable ValidationAction validationAction;
|
||||
|
||||
public ValidationOptions(Validator validator, ValidationLevel validationLevel, ValidationAction validationAction) {
|
||||
|
||||
this.validator = validator;
|
||||
this.validationLevel = validationLevel;
|
||||
this.validationAction = validationAction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an empty {@link ValidationOptions}.
|
||||
*
|
||||
@@ -428,7 +381,7 @@ public class CollectionOptions {
|
||||
/**
|
||||
* Get the {@code validationAction} to perform.
|
||||
*
|
||||
* @return {@link Optional#empty()} if not set.
|
||||
* @return @return {@link Optional#empty()} if not set.
|
||||
*/
|
||||
public Optional<ValidationAction> getValidationAction() {
|
||||
return Optional.ofNullable(validationAction);
|
||||
@@ -441,89 +394,4 @@ public class CollectionOptions {
|
||||
return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options applicable to Time Series collections.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/core/timeseries-collections">https://docs.mongodb.com/manual/core/timeseries-collections</a>
|
||||
*/
|
||||
public static class TimeSeriesOptions {
|
||||
|
||||
private final String timeField;
|
||||
|
||||
private @Nullable final String metaField;
|
||||
|
||||
private final GranularityDefinition granularity;
|
||||
|
||||
private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) {
|
||||
|
||||
Assert.hasText(timeField, "Time field must not be empty or null!");
|
||||
|
||||
this.timeField = timeField;
|
||||
this.metaField = metaField;
|
||||
this.granularity = granularity;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one,
|
||||
* that contains the date in each time series document. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param timeField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public static TimeSeriesOptions timeSeries(String timeField) {
|
||||
return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name of the field which contains metadata in each time series document. Should not be the {@literal id}
|
||||
* nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or
|
||||
* {@link java.util.Collection}. <br />
|
||||
* {@link Field#name() Annotated fieldnames} will be considered during the mapping process.
|
||||
*
|
||||
* @param metaField must not be {@literal null}.
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
*/
|
||||
public TimeSeriesOptions metaField(String metaField) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized.
|
||||
* Select one that is closest to the time span between incoming measurements.
|
||||
*
|
||||
* @return new instance of {@link TimeSeriesOptions}.
|
||||
* @see Granularity
|
||||
*/
|
||||
public TimeSeriesOptions granularity(GranularityDefinition granularity) {
|
||||
return new TimeSeriesOptions(timeField, metaField, granularity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public String getTimeField() {
|
||||
return timeField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via
|
||||
* {@link org.springframework.util.StringUtils#hasText(String)}.
|
||||
*/
|
||||
@Nullable
|
||||
public String getMetaField() {
|
||||
return metaField;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public GranularityDefinition getGranularity() {
|
||||
return granularity;
|
||||
}
|
||||
}
|
||||
}
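A sketch of creating a time-series collection with the options shown above, assuming a version in which they are available (the Javadoc marks them `@since 3.3`); collection and field names are illustrative:

```java
import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.timeseries.Granularity;

class TimeSeriesSketch {

	void createWeatherCollection(MongoTemplate template) {
		// Time series collection keyed on 'timestamp', grouping measurements by 'sensorId'.
		template.createCollection("weather", CollectionOptions.empty()
				.timeSeries(TimeSeriesOptions.timeSeries("timestamp")
						.metaField("sensorId")
						.granularity(Granularity.HOURS)));
	}
}
```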
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
@@ -24,9 +27,8 @@ import java.util.stream.Collectors;
|
||||
import org.bson.Document;
|
||||
import org.bson.conversions.Bson;
|
||||
import org.springframework.context.ApplicationEventPublisher;
|
||||
import org.springframework.dao.DataIntegrityViolationException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mongodb.BulkOperationException;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
@@ -45,9 +47,7 @@ import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.util.Pair;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.MongoBulkWriteException;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.bulk.BulkWriteResult;
|
||||
import com.mongodb.client.MongoCollection;
|
||||
@@ -64,7 +64,6 @@ import com.mongodb.client.model.*;
|
||||
* @author Jens Schauder
|
||||
* @author Michail Nikolaev
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Jacob Botuck
|
||||
* @since 1.9
|
||||
*/
|
||||
class DefaultBulkOperations implements BulkOperations {
|
||||
@@ -74,6 +73,7 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
private final BulkOperationContext bulkOperationContext;
|
||||
private final List<SourceAwareWriteModelHolder> models = new ArrayList<>();
|
||||
|
||||
private PersistenceExceptionTranslator exceptionTranslator;
|
||||
private @Nullable WriteConcern defaultWriteConcern;
|
||||
|
||||
private BulkWriteOptions bulkOptions;
|
||||
@@ -97,9 +97,19 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
this.mongoOperations = mongoOperations;
|
||||
this.collectionName = collectionName;
|
||||
this.bulkOperationContext = bulkOperationContext;
|
||||
this.exceptionTranslator = new MongoExceptionTranslator();
|
||||
this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode());
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}.
|
||||
*
|
||||
* @param exceptionTranslator can be {@literal null}.
|
||||
*/
|
||||
public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) {
|
||||
this.exceptionTranslator = exceptionTranslator == null ? new MongoExceptionTranslator() : exceptionTranslator;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}.
|
||||
*
|
||||
@@ -306,26 +316,11 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
collection = collection.withWriteConcern(defaultWriteConcern);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
} catch (RuntimeException ex) {
|
||||
|
||||
if (ex instanceof MongoBulkWriteException) {
|
||||
|
||||
MongoBulkWriteException mongoBulkWriteException = (MongoBulkWriteException) ex;
|
||||
if (mongoBulkWriteException.getWriteConcernError() != null) {
|
||||
throw new DataIntegrityViolationException(ex.getMessage(), ex);
|
||||
}
|
||||
throw new BulkOperationException(ex.getMessage(), mongoBulkWriteException);
|
||||
}
|
||||
|
||||
throw ex;
|
||||
}
|
||||
return collection.bulkWrite( //
|
||||
models.stream() //
|
||||
.map(this::extractAndMapWriteModel) //
|
||||
.collect(Collectors.toList()), //
|
||||
bulkOptions);
|
||||
}
|
||||
|
||||
private WriteModel<Document> extractAndMapWriteModel(SourceAwareWriteModelHolder it) {
|
||||
@@ -552,93 +547,15 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
static final class BulkOperationContext {
|
||||
@Value
|
||||
static class BulkOperationContext {
|
||||
|
||||
private final BulkMode bulkMode;
|
||||
private final Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
private final QueryMapper queryMapper;
|
||||
private final UpdateMapper updateMapper;
|
||||
private final ApplicationEventPublisher eventPublisher;
|
||||
private final EntityCallbacks entityCallbacks;
|
||||
|
||||
BulkOperationContext(BulkOperations.BulkMode bulkMode, Optional<? extends MongoPersistentEntity<?>> entity,
|
||||
QueryMapper queryMapper, UpdateMapper updateMapper, ApplicationEventPublisher eventPublisher,
|
||||
EntityCallbacks entityCallbacks) {
|
||||
|
||||
this.bulkMode = bulkMode;
|
||||
this.entity = entity;
|
||||
this.queryMapper = queryMapper;
|
||||
this.updateMapper = updateMapper;
|
||||
this.eventPublisher = eventPublisher;
|
||||
this.entityCallbacks = entityCallbacks;
|
||||
}
|
||||
|
||||
public BulkMode getBulkMode() {
|
||||
return this.bulkMode;
|
||||
}
|
||||
|
||||
public Optional<? extends MongoPersistentEntity<?>> getEntity() {
|
||||
return this.entity;
|
||||
}
|
||||
|
||||
public QueryMapper getQueryMapper() {
|
||||
return this.queryMapper;
|
||||
}
|
||||
|
||||
public UpdateMapper getUpdateMapper() {
|
||||
return this.updateMapper;
|
||||
}
|
||||
|
||||
public ApplicationEventPublisher getEventPublisher() {
|
||||
return this.eventPublisher;
|
||||
}
|
||||
|
||||
public EntityCallbacks getEntityCallbacks() {
|
||||
return this.entityCallbacks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
BulkOperationContext that = (BulkOperationContext) o;
|
||||
|
||||
if (bulkMode != that.bulkMode)
|
||||
return false;
|
||||
if (!ObjectUtils.nullSafeEquals(this.entity, that.entity)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.queryMapper, that.queryMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.updateMapper, that.updateMapper)) {
|
||||
return false;
|
||||
}
|
||||
if (!ObjectUtils.nullSafeEquals(this.eventPublisher, that.eventPublisher)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.entityCallbacks, that.entityCallbacks);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = bulkMode != null ? bulkMode.hashCode() : 0;
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entity);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(queryMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(updateMapper);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(eventPublisher);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(entityCallbacks);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.BulkOperationContext(bulkMode=" + this.getBulkMode() + ", entity="
|
||||
+ this.getEntity() + ", queryMapper=" + this.getQueryMapper() + ", updateMapper=" + this.getUpdateMapper()
|
||||
+ ", eventPublisher=" + this.getEventPublisher() + ", entityCallbacks=" + this.getEntityCallbacks() + ")";
|
||||
}
|
||||
@NonNull BulkMode bulkMode;
|
||||
@NonNull Optional<? extends MongoPersistentEntity<?>> entity;
|
||||
@NonNull QueryMapper queryMapper;
|
||||
@NonNull UpdateMapper updateMapper;
|
||||
ApplicationEventPublisher eventPublisher;
|
||||
EntityCallbacks entityCallbacks;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -647,50 +564,10 @@ class DefaultBulkOperations implements BulkOperations {
|
||||
* @since 2.2
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
private static final class SourceAwareWriteModelHolder {
|
||||
@Value
|
||||
private static class SourceAwareWriteModelHolder {
|
||||
|
||||
private final Object source;
|
||||
private final WriteModel<Document> model;
|
||||
|
||||
SourceAwareWriteModelHolder(Object source, WriteModel<Document> model) {
|
||||
|
||||
this.source = source;
|
||||
this.model = model;
|
||||
}
|
||||
|
||||
public Object getSource() {
|
||||
return this.source;
|
||||
}
|
||||
|
||||
public WriteModel<Document> getModel() {
|
||||
return this.model;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
SourceAwareWriteModelHolder that = (SourceAwareWriteModelHolder) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.source, that.source)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.model, that.model);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(model);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(source);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "DefaultBulkOperations.SourceAwareWriteModelHolder(source=" + this.getSource() + ", model="
|
||||
+ this.getModel() + ")";
|
||||
}
|
||||
Object source;
|
||||
WriteModel<Document> model;
|
||||
}
|
||||
}
|
||||
|
||||
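The `BulkOperationContext` and `SourceAwareWriteModelHolder` changes above swap hand-written constructors, getters, `equals`/`hashCode` and `toString` for Lombok's `@Value`. A small illustrative sketch of what that annotation generates; the class and field names here are made up for demonstration only:

```java
import lombok.NonNull;
import lombok.Value;

// @Value makes the class final, marks fields private final, and generates the constructor,
// getters, equals/hashCode and toString that the previous code spelled out by hand.
@Value
class Holder {
	@NonNull Object source; // the generated constructor rejects null for @NonNull fields
	Object model;           // plain field: accessed via the generated getModel()
}
```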
@@ -47,7 +47,7 @@ class DefaultIndexOperationsProvider implements IndexOperationsProvider {
	 * @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String)
	 */
	@Override
	public IndexOperations indexOps(String collectionName, Class<?> type) {
		return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type);
	public IndexOperations indexOps(String collectionName) {
		return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper);
	}
}

@@ -1,29 +0,0 @@
/*
 * Copyright 2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.mongodb.core;

/**
 * Encryption algorithms supported by MongoDB Client Side Field Level Encryption.
 *
 * @author Christoph Strobl
 * @since 3.3
 */
public final class EncryptionAlgorithms {

	public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic";
	public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random";

}
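These constants name the CSFLE algorithms and are typically plugged into the `algorithm` attribute of the `@Encrypted` mapping annotation, which later hunks in this compare read via `encrypted.algorithm()`. A hedged sketch with an illustrative domain type:

```java
import org.springframework.data.mongodb.core.EncryptionAlgorithms;
import org.springframework.data.mongodb.core.mapping.Encrypted;

// Illustrative entity, not part of this change set.
class Patient {

	// Deterministic encryption keeps equality queries on the encrypted field possible.
	@Encrypted(algorithm = EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic)
	String ssn;

	// Random encryption offers stronger protection for fields that are never queried directly.
	@Encrypted(algorithm = EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Random)
	String bloodType;
}
```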
@@ -15,8 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
@@ -29,23 +32,18 @@ import org.springframework.data.mapping.PersistentEntity;
|
||||
import org.springframework.data.mapping.PersistentPropertyAccessor;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mapping.model.ConvertingPropertyAccessor;
|
||||
import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions;
|
||||
import org.springframework.data.mongodb.core.convert.MongoWriter;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.TimeSeries;
|
||||
import org.springframework.data.mongodb.core.query.Collation;
|
||||
import org.springframework.data.mongodb.core.query.Criteria;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.LinkedMultiValueMap;
|
||||
import org.springframework.util.MultiValueMap;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Common operations performed on an entity in the context of it's mapping metadata.
|
||||
@@ -57,15 +55,12 @@ import org.springframework.util.StringUtils;
|
||||
* @see MongoTemplate
|
||||
* @see ReactiveMongoTemplate
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class EntityOperations {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
EntityOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
this.context = context;
|
||||
}
|
||||
private final @NonNull MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context;
|
||||
|
||||
/**
|
||||
* Creates a new {@link Entity} for the given bean.
|
||||
@@ -74,7 +69,7 @@ class EntityOperations {
|
||||
* @return new instance of {@link Entity}.
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
<T> Entity<T> forEntity(T entity) {
|
||||
public <T> Entity<T> forEntity(T entity) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
|
||||
@@ -97,7 +92,7 @@ class EntityOperations {
|
||||
* @return new instance of {@link AdaptibleEntity}.
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
<T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {
|
||||
public <T> AdaptibleEntity<T> forEntity(T entity, ConversionService conversionService) {
|
||||
|
||||
Assert.notNull(entity, "Bean must not be null!");
|
||||
Assert.notNull(conversionService, "ConversionService must not be null!");
|
||||
@@ -113,20 +108,6 @@ class EntityOperations {
|
||||
return AdaptibleMappedEntity.of(entity, context, conversionService);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param source can be {@literal null}.
|
||||
* @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}.
|
||||
* @since 3.2
|
||||
*/
|
||||
static boolean isCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (source == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param entityClass should not be null.
|
||||
* @return the {@link MongoPersistentEntity#getCollection() collection name}.
|
||||
@@ -365,14 +346,11 @@ class EntityOperations {
|
||||
Number getVersion();
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
private static class UnmappedEntity<T extends Map<String, Object>> implements AdaptibleEntity<T> {
|
||||
|
||||
private final T map;
|
||||
|
||||
protected UnmappedEntity(T map) {
|
||||
this.map = map;
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName()
|
||||
@@ -482,7 +460,7 @@ class EntityOperations {
|
||||
|
||||
private static class SimpleMappedEntity<T extends Map<String, Object>> extends UnmappedEntity<T> {
|
||||
|
||||
protected SimpleMappedEntity(T map) {
|
||||
SimpleMappedEntity(T map) {
|
||||
super(map);
|
||||
}
|
||||
|
||||
@@ -505,19 +483,12 @@ class EntityOperations {
|
||||
}
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor(access = AccessLevel.PROTECTED)
|
||||
private static class MappedEntity<T> implements Entity<T> {
|
||||
|
||||
private final MongoPersistentEntity<?> entity;
|
||||
private final IdentifierAccessor idAccessor;
|
||||
private final PersistentPropertyAccessor<T> propertyAccessor;
|
||||
|
||||
protected MappedEntity(MongoPersistentEntity<?> entity, IdentifierAccessor idAccessor,
|
||||
PersistentPropertyAccessor<T> propertyAccessor) {
|
||||
|
||||
this.entity = entity;
|
||||
this.idAccessor = idAccessor;
|
||||
this.propertyAccessor = propertyAccessor;
|
||||
}
|
||||
private final @NonNull MongoPersistentEntity<?> entity;
|
||||
private final @NonNull IdentifierAccessor idAccessor;
|
||||
private final @NonNull PersistentPropertyAccessor<T> propertyAccessor;
|
||||
|
||||
private static <T> MappedEntity<T> of(T bean,
|
||||
MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> context) {
|
||||
@@ -782,36 +753,17 @@ class EntityOperations {
|
||||
* @return
|
||||
*/
|
||||
Optional<Collation> getCollation(Query query);
|
||||
|
||||
/**
|
||||
* Derive the applicable {@link CollectionOptions} for the given type.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
CollectionOptions getCollectionOptions();
|
||||
|
||||
/**
|
||||
* Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially
|
||||
* annotated field names.
|
||||
*
|
||||
* @param options must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.3
|
||||
*/
|
||||
TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom
|
||||
* conversions).
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
enum UntypedOperations implements TypedOperations<Object> {
|
||||
|
||||
INSTANCE;
|
||||
|
||||
UntypedOperations() {}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
public static <T> TypedOperations<T> instance() {
|
||||
return (TypedOperations) INSTANCE;
|
||||
@@ -839,16 +791,6 @@ class EntityOperations {
|
||||
|
||||
return query.getCollation();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CollectionOptions getCollectionOptions() {
|
||||
return CollectionOptions.empty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) {
|
||||
return options;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -856,13 +798,10 @@ class EntityOperations {
|
||||
*
|
||||
* @param <T>
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
static class TypedEntityOperations<T> implements TypedOperations<T> {
|
||||
|
||||
private final MongoPersistentEntity<T> entity;
|
||||
|
||||
protected TypedEntityOperations(MongoPersistentEntity<T> entity) {
|
||||
this.entity = entity;
|
||||
}
|
||||
private final @NonNull MongoPersistentEntity<T> entity;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -886,58 +825,6 @@ class EntityOperations {

			return Optional.ofNullable(entity.getCollation());
		}

		@Override
		public CollectionOptions getCollectionOptions() {

			CollectionOptions collectionOptions = CollectionOptions.empty();
			if (entity.hasCollation()) {
				collectionOptions = collectionOptions.collation(entity.getCollation());
			}

			if (entity.isAnnotationPresent(TimeSeries.class)) {

				TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class);

				if (entity.getPersistentProperty(timeSeries.timeField()) == null) {
					throw new MappingException(String.format("Time series field '%s' does not exist in type %s",
							timeSeries.timeField(), entity.getName()));
				}

				TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField());
				if (StringUtils.hasText(timeSeries.metaField())) {

					if (entity.getPersistentProperty(timeSeries.metaField()) == null) {
						throw new MappingException(
								String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName()));
					}

					options = options.metaField(timeSeries.metaField());
				}
				if (!Granularity.DEFAULT.equals(timeSeries.granularity())) {
					options = options.granularity(timeSeries.granularity());
				}
				collectionOptions = collectionOptions.timeSeries(options);
			}

			return collectionOptions;
		}

		@Override
		public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) {

			TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField()));

			if (StringUtils.hasText(source.getMetaField())) {
				target = target.metaField(mappedNameOrDefault(source.getMetaField()));
			}
			return target.granularity(source.getGranularity());
		}

		private String mappedNameOrDefault(String name) {
			MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name);
			return persistentProperty != null ? persistentProperty.getFieldName() : name;
		}
	}

}

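`getCollectionOptions()` above derives time series collection options from the `@TimeSeries` annotation and validates that the referenced properties exist, while `mapTimeSeriesOptions(...)` translates property names to their mapped field names. A sketch of an entity that would pass that derivation; the type and field names are illustrative:

```java
import java.time.Instant;

import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Field;
import org.springframework.data.mongodb.core.mapping.TimeSeries;
import org.springframework.data.mongodb.core.timeseries.Granularity;

// Illustrative entity: timeField/metaField must name existing properties,
// otherwise the code above throws a MappingException.
@TimeSeries(timeField = "measuredAt", metaField = "sensorId", granularity = Granularity.SECONDS)
class Measurement {

	@Id String id;

	@Field("ts") Instant measuredAt; // mapTimeSeriesOptions resolves this to the field name "ts"

	String sensorId; // grouped per sensor

	double value;
}
```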
@@ -15,10 +15,16 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
@@ -29,13 +35,10 @@ import org.springframework.util.StringUtils;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableAggregationOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -53,21 +56,15 @@ class ExecutableAggregationOperationSupport implements ExecutableAggregationOper
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableAggregationSupport<T>
|
||||
implements AggregationWithAggregation<T>, ExecutableAggregation<T>, TerminatingAggregation<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final Aggregation aggregation;
|
||||
private final String collection;
|
||||
|
||||
public ExecutableAggregationSupport(MongoTemplate template, Class<T> domainType, Aggregation aggregation,
|
||||
String collection) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.aggregation = aggregation;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
@Nullable Aggregation aggregation;
|
||||
@Nullable String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -125,11 +125,6 @@ public interface ExecutableFindOperation {

	/**
	 * Get the number of matching elements.
	 * <br />
	 * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation
	 * execution} even for empty {@link Query queries}, which may have an impact on performance but guarantees shard,
	 * session and transaction compliance. In case an inaccurate count satisfies the application's needs, use
	 * {@link MongoOperations#estimatedCount(String)} for empty queries instead.
	 *
	 * @return total number of matching elements.
	 */

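The Javadoc above describes the trade-off between the accurate, aggregation-backed count and the metadata-based estimate. A short sketch of both calls, assuming an injected `MongoOperations` and an illustrative `Person` domain type mapped to the `person` collection:

```java
import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

class CountExamples {

	// Accurate count: executes a real count and therefore honors shards, sessions and transactions.
	long accurateCount(MongoOperations operations) {
		return operations.query(Person.class)
				.matching(new Query(Criteria.where("lastname").is("Strobl")))
				.count();
	}

	// Estimated count: reads collection metadata; cheap, but only valid for empty queries.
	long estimatedCount(MongoOperations operations) {
		return operations.estimatedCount("person");
	}

	static class Person {
		String lastname; // illustrative domain type
	}
}
```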
@@ -15,6 +15,12 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Stream;
|
||||
@@ -31,7 +37,6 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.client.FindIterable;
|
||||
|
||||
/**
|
||||
@@ -41,15 +46,12 @@ import com.mongodb.client.FindIterable;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableFindOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -68,23 +70,16 @@ class ExecutableFindOperationSupport implements ExecutableFindOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableFindSupport<T>
|
||||
implements ExecutableFind<T>, FindWithCollection<T>, FindWithProjection<T>, FindWithQuery<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
@Nullable private final String collection;
|
||||
private final Query query;
|
||||
|
||||
ExecutableFindSupport(MongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
String collection, Query query) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<?> domainType;
|
||||
Class<T> returnType;
|
||||
@Nullable String collection;
|
||||
Query query;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
@@ -32,13 +37,10 @@ import com.mongodb.bulk.BulkWriteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableInsertOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -56,20 +58,14 @@ class ExecutableInsertOperationSupport implements ExecutableInsertOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableInsertSupport<T> implements ExecutableInsert<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
@Nullable private final String collection;
|
||||
@Nullable private final BulkMode bulkMode;
|
||||
|
||||
ExecutableInsertSupport(MongoTemplate template, Class<T> domainType, String collection, BulkMode bulkMode) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.collection = collection;
|
||||
this.bulkMode = bulkMode;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
@Nullable String collection;
|
||||
@Nullable BulkMode bulkMode;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
@@ -29,17 +32,12 @@ import org.springframework.util.StringUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableMapReduceOperationSupport(MongoTemplate template) {
|
||||
|
||||
Assert.notNull(template, "Template must not be null!");
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -31,15 +36,12 @@ import com.mongodb.client.result.DeleteResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate tempate;
|
||||
|
||||
public ExecutableRemoveOperationSupport(MongoTemplate tempate) {
|
||||
this.tempate = tempate;
|
||||
}
|
||||
private final @NonNull MongoTemplate tempate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -57,19 +59,14 @@ class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableRemoveSupport<T> implements ExecutableRemove<T>, RemoveWithCollection<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final Query query;
|
||||
@Nullable private final String collection;
|
||||
|
||||
public ExecutableRemoveSupport(MongoTemplate template, Class<T> domainType, Query query, String collection) {
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.query = query;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
Query query;
|
||||
@Nullable String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -89,7 +89,7 @@ public interface ExecutableUpdateOperation {

	/**
	 * Trigger
	 * <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
	 * <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
	 * execution by calling one of the terminating methods.
	 *
	 * @author Mark Paluch

@@ -15,6 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.lang.Nullable;
|
||||
@@ -30,15 +35,12 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Mark Paluch
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final MongoTemplate template;
|
||||
|
||||
ExecutableUpdateOperationSupport(MongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull MongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -56,34 +58,21 @@ class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ExecutableUpdateSupport<T>
|
||||
implements ExecutableUpdate<T>, UpdateWithCollection<T>, UpdateWithQuery<T>, TerminatingUpdate<T>,
|
||||
FindAndReplaceWithOptions<T>, TerminatingFindAndReplace<T>, FindAndReplaceWithProjection<T> {
|
||||
|
||||
private final MongoTemplate template;
|
||||
private final Class domainType;
|
||||
private final Query query;
|
||||
@Nullable private final UpdateDefinition update;
|
||||
@Nullable private final String collection;
|
||||
@Nullable private final FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable private final FindAndReplaceOptions findAndReplaceOptions;
|
||||
@Nullable private final Object replacement;
|
||||
private final Class<T> targetType;
|
||||
|
||||
ExecutableUpdateSupport(MongoTemplate template, Class domainType, Query query, UpdateDefinition update,
|
||||
String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions,
|
||||
Object replacement, Class<T> targetType) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.query = query;
|
||||
this.update = update;
|
||||
this.collection = collection;
|
||||
this.findAndModifyOptions = findAndModifyOptions;
|
||||
this.findAndReplaceOptions = findAndReplaceOptions;
|
||||
this.replacement = replacement;
|
||||
this.targetType = targetType;
|
||||
}
|
||||
@NonNull MongoTemplate template;
|
||||
@NonNull Class domainType;
|
||||
Query query;
|
||||
@Nullable UpdateDefinition update;
|
||||
@Nullable String collection;
|
||||
@Nullable FindAndModifyOptions findAndModifyOptions;
|
||||
@Nullable FindAndReplaceOptions findAndReplaceOptions;
|
||||
@Nullable Object replacement;
|
||||
@NonNull Class<T> targetType;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -17,7 +17,7 @@ package org.springframework.data.mongodb.core;

/**
 * Options for
 * <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>.
 * <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>.
 * <br />
 * Defaults to
 * <dl>

@@ -115,10 +115,6 @@ abstract class IndexConverters {
				ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class)));
			}

			if (indexOptions.containsKey("wildcardProjection")) {
				ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class));
			}

			return ops;
		};
	}

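The converter now carries a `wildcardProjection` document from the index options into the driver's `IndexOptions`. A hedged sketch at the driver level of the kind of index definition this preserves; the collection and projected path are illustrative:

```java
import org.bson.Document;

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.IndexOptions;

class WildcardIndexSetup {

	// A wildcard index on all fields ("$**") whose projection limits indexing to a chosen sub-path.
	void createWildcardIndex(MongoCollection<Document> collection) {

		IndexOptions options = new IndexOptions()
				.wildcardProjection(new Document("userMetadata.age", 1)); // only index this path

		collection.createIndex(new Document("$**", 1), options);
	}
}
```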
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
@@ -24,6 +27,8 @@ import org.springframework.data.mongodb.core.query.Update;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.util.StreamUtils;
|
||||
|
||||
import com.mongodb.client.model.Filters;
|
||||
|
||||
/**
|
||||
* A MongoDB document in its mapped state. I.e. after a source document has been mapped using mapping information of the
|
||||
* entity the source document was supposed to represent.
|
||||
@@ -31,20 +36,13 @@ import org.springframework.data.util.StreamUtils;
|
||||
* @author Oliver Gierke
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(staticName = "of")
|
||||
public class MappedDocument {
|
||||
|
||||
private static final String ID_FIELD = "_id";
|
||||
private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1);
|
||||
|
||||
private final Document document;
|
||||
|
||||
private MappedDocument(Document document) {
|
||||
this.document = document;
|
||||
}
|
||||
|
||||
public static MappedDocument of(Document document) {
|
||||
return new MappedDocument(document);
|
||||
}
|
||||
private final @Getter Document document;
|
||||
|
||||
public static Document getIdOnlyProjection() {
|
||||
return ID_ONLY_PROJECTION;
|
||||
@@ -93,10 +91,6 @@ public class MappedDocument {
|
||||
return new MappedUpdate(Update.fromDocument(document, ID_FIELD));
|
||||
}
|
||||
|
||||
public Document getDocument() {
|
||||
return this.document;
|
||||
}
|
||||
|
||||
/**
|
||||
* An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been
|
||||
* mapped to the specific domain type.
|
||||
@@ -156,14 +150,5 @@ public class MappedDocument {
|
||||
public List<ArrayFilter> getArrayFilters() {
|
||||
return delegate.getArrayFilters();
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters()
|
||||
*/
|
||||
@Override
|
||||
public boolean hasArrayFilters() {
|
||||
return delegate.hasArrayFilters();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,20 +20,13 @@ import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumSet;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.Field;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type;
|
||||
@@ -41,12 +34,10 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder;
|
||||
import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject;
|
||||
import org.springframework.data.util.ClassTypeInformation;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.CollectionUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
import org.springframework.util.StringUtils;
|
||||
|
||||
/**
|
||||
* {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain
|
||||
@@ -61,7 +52,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
private final MongoConverter converter;
|
||||
private final MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
private final Predicate<JsonSchemaPropertyContext> filter;
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link MappingMongoJsonSchemaCreator}.
|
||||
@@ -71,24 +61,10 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter) {
|
||||
|
||||
this(converter, (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter.getMappingContext(),
|
||||
(property) -> true);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
MappingMongoJsonSchemaCreator(MongoConverter converter,
|
||||
MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext,
|
||||
Predicate<JsonSchemaPropertyContext> filter) {
|
||||
|
||||
Assert.notNull(converter, "Converter must not be null!");
|
||||
this.converter = converter;
|
||||
this.mappingContext = mappingContext;
|
||||
this.filter = filter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter) {
|
||||
return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter);
|
||||
this.mappingContext = (MappingContext<MongoPersistentEntity<?>, MongoPersistentProperty>) converter
|
||||
.getMappingContext();
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -101,29 +77,11 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
MongoPersistentEntity<?> entity = mappingContext.getRequiredPersistentEntity(type);
|
||||
MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder();
|
||||
|
||||
{
|
||||
Encrypted encrypted = entity.findAnnotation(Encrypted.class);
|
||||
if (encrypted != null) {
|
||||
|
||||
Document encryptionMetadata = new Document();
|
||||
|
||||
Collection<Object> encryptionKeyIds = entity.getEncryptionKeyIds();
|
||||
if (!CollectionUtils.isEmpty(encryptionKeyIds)) {
|
||||
encryptionMetadata.append("keyId", encryptionKeyIds);
|
||||
}
|
||||
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
encryptionMetadata.append("algorithm", encrypted.algorithm());
|
||||
}
|
||||
|
||||
schemaBuilder.encryptionMetadata(encryptionMetadata);
|
||||
}
|
||||
}
|
||||
|
||||
List<JsonSchemaProperty> schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity);
|
||||
schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0]));
|
||||
|
||||
return schemaBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
private List<JsonSchemaProperty> computePropertiesForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -135,11 +93,6 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
|
||||
List<MongoPersistentProperty> currentPath = new ArrayList<>(path);
|
||||
|
||||
if (!filter.test(new PropertyContext(
|
||||
currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")), nested))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (path.contains(nested)) { // cycle guard
|
||||
schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)),
|
||||
Object.class, false));
|
||||
@@ -161,88 +114,21 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
Class<?> rawTargetType = computeTargetType(property); // target type before conversion
|
||||
Class<?> targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type
|
||||
|
||||
if (!isCollection(property) && property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
if (property.isEntity() && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) {
|
||||
return createObjectSchemaPropertyForEntity(path, property, required);
|
||||
}
|
||||
|
||||
String fieldName = computePropertyFieldName(property);
|
||||
|
||||
JsonSchemaProperty schemaProperty;
|
||||
if (isCollection(property)) {
|
||||
schemaProperty = createArraySchemaProperty(fieldName, property, required);
|
||||
if (property.isCollectionLike()) {
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
} else if (property.isMap()) {
|
||||
schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
return createSchemaProperty(fieldName, Type.objectType(), required);
|
||||
} else if (ClassUtils.isAssignable(Enum.class, targetType)) {
|
||||
schemaProperty = createEnumSchemaProperty(fieldName, targetType, required);
|
||||
} else {
|
||||
schemaProperty = createSchemaProperty(fieldName, targetType, required);
|
||||
return createEnumSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
return applyEncryptionDataIfNecessary(property, schemaProperty);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property,
|
||||
boolean required) {
|
||||
|
||||
ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName);
|
||||
|
||||
if (isSpecificType(property)) {
|
||||
schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty);
|
||||
}
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(schemaProperty, required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property,
|
||||
ArrayJsonSchemaProperty schemaProperty) {
|
||||
|
||||
MongoPersistentEntity<?> persistentEntity = mappingContext
|
||||
.getPersistentEntity(property.getTypeInformation().getRequiredComponentType());
|
||||
|
||||
if (persistentEntity != null) {
|
||||
|
||||
List<JsonSchemaProperty> nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity);
|
||||
|
||||
if (nestedProperties.isEmpty()) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
return schemaProperty
|
||||
.items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0])));
|
||||
}
|
||||
|
||||
if (ClassUtils.isAssignable(Enum.class, property.getActualType())) {
|
||||
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) property.getActualType());
|
||||
|
||||
return schemaProperty
|
||||
.items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues));
|
||||
}
|
||||
|
||||
return schemaProperty.items(JsonSchemaObject.of(property.getActualType()));
|
||||
}
|
||||
|
||||
private boolean isSpecificType(MongoPersistentProperty property) {
|
||||
return !ClassTypeInformation.OBJECT.equals(property.getTypeInformation().getActualType());
|
||||
}
|
||||
|
||||
private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property,
|
||||
JsonSchemaProperty schemaProperty) {
|
||||
|
||||
Encrypted encrypted = property.findAnnotation(Encrypted.class);
|
||||
if (encrypted == null) {
|
||||
return schemaProperty;
|
||||
}
|
||||
|
||||
EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty);
|
||||
if (StringUtils.hasText(encrypted.algorithm())) {
|
||||
enc = enc.algorithm(encrypted.algorithm());
|
||||
}
|
||||
if (!ObjectUtils.isEmpty(encrypted.keyId())) {
|
||||
enc = enc.keys(property.getEncryptionKeyIds());
|
||||
}
|
||||
return enc;
|
||||
return createSchemaProperty(fieldName, targetType, required);
|
||||
}
|
||||
|
||||
private JsonSchemaProperty createObjectSchemaPropertyForEntity(List<MongoPersistentProperty> path,
|
||||
@@ -256,12 +142,15 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class<?> targetType, boolean required) {
|
||||
|
||||
List<Object> possibleValues = getPossibleEnumValues((Class<Enum>) targetType);
|
||||
List<Object> possibleValues = new ArrayList<>();
|
||||
|
||||
targetType = computeTargetType(targetType, possibleValues);
|
||||
for (Object enumValue : EnumSet.allOf((Class) targetType)) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
targetType = possibleValues.isEmpty() ? targetType : possibleValues.iterator().next().getClass();
|
||||
return createSchemaProperty(fieldName, targetType, required, possibleValues);
|
||||
}
|
||||
|
||||
@@ -272,20 +161,14 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required,
|
||||
Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues);
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private TypedJsonSchemaObject createSchemaObject(Object type, Collection<?> possibleValues) {
|
||||
|
||||
TypedJsonSchemaObject schemaObject = type instanceof Type ? JsonSchemaObject.of(Type.class.cast(type))
|
||||
: JsonSchemaObject.of(Class.class.cast(type));
|
||||
|
||||
if (!CollectionUtils.isEmpty(possibleValues)) {
|
||||
schemaObject = schemaObject.possibleValues(possibleValues);
|
||||
}
|
||||
return schemaObject;
|
||||
|
||||
return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required);
|
||||
}
|
||||
|
||||
private String computePropertyFieldName(PersistentProperty property) {
|
||||
@@ -316,53 +199,12 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator {
|
||||
return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType();
|
||||
}
|
||||
|
||||
private static Class<?> computeTargetType(Class<?> fallback, List<Object> possibleValues) {
|
||||
return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass();
|
||||
}
|
||||
|
||||
private <E extends Enum<E>> List<Object> getPossibleEnumValues(Class<E> targetType) {
|
||||
|
||||
EnumSet<E> enumSet = EnumSet.allOf(targetType);
|
||||
List<Object> possibleValues = new ArrayList<>(enumSet.size());
|
||||
|
||||
for (Object enumValue : enumSet) {
|
||||
possibleValues.add(converter.convertToMongoType(enumValue));
|
||||
}
|
||||
|
||||
return possibleValues;
|
||||
}
|
||||
|
||||
private static boolean isCollection(MongoPersistentProperty property) {
|
||||
return property.isCollectionLike() && !property.getType().equals(byte[].class);
|
||||
}
|
||||
|
||||
static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) {
|
||||
return required ? JsonSchemaProperty.required(property) : property;
|
||||
}
|
||||
|
||||
class PropertyContext implements JsonSchemaPropertyContext {
|
||||
|
||||
private final String path;
|
||||
private final MongoPersistentProperty property;
|
||||
|
||||
public PropertyContext(String path, MongoPersistentProperty property) {
|
||||
this.path = path;
|
||||
this.property = property;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MongoPersistentProperty getProperty() {
|
||||
if (!required) {
|
||||
return property;
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property) {
|
||||
return (MongoPersistentEntity<T>) mappingContext.getPersistentEntity(property);
|
||||
}
|
||||
return JsonSchemaProperty.required(property);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +36,6 @@ import com.mongodb.MongoClientSettings.Builder;
|
||||
import com.mongodb.ReadConcern;
|
||||
import com.mongodb.ReadPreference;
|
||||
import com.mongodb.ServerAddress;
|
||||
import com.mongodb.ServerApi;
|
||||
import com.mongodb.WriteConcern;
|
||||
import com.mongodb.connection.ClusterConnectionMode;
|
||||
import com.mongodb.connection.ClusterType;
|
||||
@@ -114,7 +113,6 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
|
||||
// encryption and retry
|
||||
|
||||
private @Nullable AutoEncryptionSettings autoEncryptionSettings;
|
||||
private @Nullable ServerApi serverApi;
|
||||
|
||||
/**
|
||||
* @param socketConnectTimeoutMS in msec
|
||||
@@ -397,15 +395,6 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
		this.autoEncryptionSettings = autoEncryptionSettings;
	}

	/**
	 * @param serverApi can be {@literal null}.
	 * @see MongoClientSettings.Builder#serverApi(ServerApi)
	 * @since 3.3
	 */
	public void setServerApi(@Nullable ServerApi serverApi) {
		this.serverApi = serverApi;
	}

	@Override
	public Class<?> getObjectType() {
		return MongoClientSettings.class;
@@ -487,11 +476,9 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean<MongoCli
		if (retryWrites != null) {
			builder = builder.retryWrites(retryWrites);
		}

		if (uUidRepresentation != null) {
			builder = builder.uuidRepresentation(uUidRepresentation);
		}
		if (serverApi != null) {
			builder = builder.serverApi(serverApi);
			builder.uuidRepresentation(uUidRepresentation);
		}

		return builder.build();
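The new `serverApi` property lets the factory bean pin the MongoDB Stable API version on the resulting `MongoClientSettings`. A sketch of wiring it in a configuration class; the bean method name and the choice of version V1 are illustrative:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean;

import com.mongodb.ServerApi;
import com.mongodb.ServerApiVersion;

class MongoClientSettingsConfig {

	@Bean
	public MongoClientSettingsFactoryBean mongoClientSettings() {

		MongoClientSettingsFactoryBean factoryBean = new MongoClientSettingsFactoryBean();
		factoryBean.setServerApi(ServerApi.builder().version(ServerApiVersion.V1).build()); // Stable API v1
		return factoryBean;
	}
}
```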
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
|
||||
import org.springframework.aop.framework.ProxyFactory;
|
||||
import org.springframework.dao.DataAccessException;
|
||||
import org.springframework.dao.support.PersistenceExceptionTranslator;
|
||||
@@ -22,7 +24,6 @@ import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.WriteConcern;
|
||||
@@ -33,7 +34,7 @@ import com.mongodb.client.MongoDatabase;
|
||||
/**
|
||||
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
|
||||
* database name and exception translator.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Not intended to be used directly.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -170,15 +171,11 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static final class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory {
|
||||
|
||||
private final ClientSession session;
|
||||
private final MongoDatabaseFactory delegate;
|
||||
|
||||
public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFactory delegate) {
|
||||
this.session = session;
|
||||
this.delegate = delegate;
|
||||
}
|
||||
ClientSession session;
|
||||
MongoDatabaseFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -259,40 +256,5 @@ public abstract class MongoDatabaseFactorySupport<C> implements MongoDatabaseFac
|
||||
|
||||
return targetType.cast(factory.getProxy(target.getClass().getClassLoader()));
|
||||
}
|
||||
|
||||
public ClientSession getSession() {
|
||||
return this.session;
|
||||
}
|
||||
|
||||
public MongoDatabaseFactory getDelegate() {
|
||||
return this.delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.session, that.session)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.delegate, that.delegate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(this.session);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + ", delegate="
|
||||
+ this.getDelegate() + ")";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ import org.springframework.dao.support.PersistenceExceptionTranslator;
/**
* Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as
* database name and exception translator.
* <br />
* <p/>
* Not intended to be used directly.
*
* @author Christoph Strobl

@@ -15,23 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.springframework.data.mapping.PersistentProperty;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoConverter;
|
||||
import org.springframework.data.mongodb.core.convert.MongoCustomConversions;
|
||||
import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver;
|
||||
import org.springframework.data.mongodb.core.mapping.Encrypted;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes;
|
||||
import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable;
|
||||
import org.springframework.data.mongodb.core.schema.JsonSchemaProperty;
|
||||
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@@ -40,7 +24,6 @@ import org.springframework.util.Assert;
|
||||
* following mapping rules.
|
||||
* <p>
|
||||
* <strong>Required Properties</strong>
|
||||
* </p>
|
||||
* <ul>
|
||||
* <li>Properties of primitive type</li>
|
||||
* </ul>
|
||||
@@ -62,8 +45,7 @@ import org.springframework.util.Assert;
|
||||
* {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into
|
||||
* {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more
|
||||
* specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation.
|
||||
|
||||
* {@link Encrypted} properties will contain {@literal encrypt} information.
|
||||
* </p>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 2.2
|
||||
@@ -78,88 +60,6 @@ public interface MongoJsonSchemaCreator {
|
||||
*/
|
||||
MongoJsonSchema createSchemaFor(Class<?> type);
|
||||
|
||||
/**
|
||||
* Filter matching {@link JsonSchemaProperty properties}.
|
||||
*
|
||||
* @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}.
|
||||
* @return new instance of {@link MongoJsonSchemaCreator}.
|
||||
* @since 3.3
|
||||
*/
|
||||
MongoJsonSchemaCreator filter(Predicate<JsonSchemaPropertyContext> filter);
|
||||
|
||||
/**
|
||||
* The context in which a specific {@link #getProperty()} is encountered during schema creation.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
interface JsonSchemaPropertyContext {
|
||||
|
||||
/**
|
||||
* The path to a given field/property in dot notation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
String getPath();
|
||||
|
||||
/**
|
||||
* The current property.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
MongoPersistentProperty getProperty();
|
||||
|
||||
/**
* Obtain the {@link MongoPersistentEntity} for a given property.
*
* @param property must not be {@literal null}.
* @param <T>
* @return {@literal null} if the property is not an entity. It is nevertheless recommended to check
* {@link PersistentProperty#isEntity()} first.
*/
@Nullable
<T> MongoPersistentEntity<T> resolveEntity(MongoPersistentProperty property);

}

/**
* A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones.
*
* @return new instance of {@link Predicate}.
* @since 3.3
*/
static Predicate<JsonSchemaPropertyContext> encryptedOnly() {

return new Predicate<JsonSchemaPropertyContext>() {

// cycle guard
private final Set<MongoPersistentProperty> seen = new HashSet<>();

@Override
public boolean test(JsonSchemaPropertyContext context) {
return extracted(context.getProperty(), context);
}

private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) {
if (property.isAnnotationPresent(Encrypted.class)) {
return true;
}

if (!property.isEntity() || seen.contains(property)) {
return false;
}

seen.add(property);

for (MongoPersistentProperty nested : context.resolveEntity(property)) {
if (extracted(nested, context)) {
return true;
}
}
return false;
}
};
}

/**
* Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given
* {@link MongoConverter}.
@@ -172,41 +72,4 @@ public interface MongoJsonSchemaCreator {
Assert.notNull(mongoConverter, "MongoConverter must not be null!");
return new MappingMongoJsonSchemaCreator(mongoConverter);
}

/**
* Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential
* {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}.
*
* @param mappingContext must not be {@literal null}.
* @return new instance of {@link MongoJsonSchemaCreator}.
* @since 3.3
*/
static MongoJsonSchemaCreator create(MappingContext mappingContext) {

MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
converter.afterPropertiesSet();

return create(converter);
}

/**
* Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We
* recommend to use {@link #create(MappingContext)}.
*
* @return new instance of {@link MongoJsonSchemaCreator}.
* @since 3.3
*/
static MongoJsonSchemaCreator create() {

MongoMappingContext mappingContext = new MongoMappingContext();
mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER);
mappingContext.afterPropertiesSet();

MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext);
converter.setCustomConversions(MongoCustomConversions.create(config -> {}));
converter.afterPropertiesSet();

return create(converter);
}
}

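A hedged usage sketch of the `filter(..)` / `encryptedOnly()` combination introduced above; the `Patient` type and its fields are invented for illustration, the creator API is the one shown in this hunk:

```java
import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
import org.springframework.data.mongodb.core.mapping.Encrypted;
import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

class EncryptedSchemaSketch {

	// hypothetical domain type carrying an encrypted property
	static class Patient {
		String name;
		@Encrypted String ssn;
	}

	static MongoJsonSchema encryptedFieldsOnly() {

		// keep only @Encrypted properties (and the paths leading to nested ones)
		return MongoJsonSchemaCreator.create()
				.filter(MongoJsonSchemaCreator.encryptedOnly())
				.createSchemaFor(Patient.class);
	}
}
```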
@@ -58,7 +58,7 @@ import com.mongodb.client.result.UpdateResult;
* Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but
* a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK
* proxy).
* <br />
* <p/>
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
* Document Transactions</a>.
@@ -125,7 +125,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes a {@link DbCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -138,7 +138,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -151,7 +151,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link CollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -176,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations {
/**
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
* <br />
* <p/>
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use the
* {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}.
*
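From the caller's side the `SessionScoped` indirection looks roughly like the sketch below — assuming the `withSession(ClientSessionOptions)` overload and a `SessionCallback` that receives session-bound `MongoOperations`; the collection name is made up:

```java
import com.mongodb.ClientSessionOptions;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Query;

class SessionScopedSketch {

	static long countInSession(MongoOperations template) {

		// every command issued inside the callback runs on the same ClientSession;
		// the Consumer overload of execute(..) is used here to close the session afterwards
		return template.withSession(ClientSessionOptions.builder().build())
				.execute(operations -> operations.count(new Query(), "person"), session -> session.close());
	}
}
```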
@@ -212,7 +212,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -300,7 +300,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* is created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -310,7 +310,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -320,7 +320,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -330,7 +330,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -339,7 +339,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -403,10 +403,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -417,10 +417,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a list of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -539,11 +539,11 @@ public interface MongoOperations extends FluentMongoOperations {

/**
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
* <br />
* <p/>
* Returns a {@link CloseableIterator} that wraps a Mongo DB {@link com.mongodb.client.AggregateIterable} that
* needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name
* of the inputCollection is derived from the inputType of the aggregation.
* <br />
* <p/>
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
* explanation mode will throw an {@link IllegalArgumentException}.
*
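A usage sketch for the streaming variant described above (`Order` is an invented type); the essential point is that the returned `CloseableIterator` must be closed:

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.util.CloseableIterator;

class AggregateStreamSketch {

	static void stream(MongoOperations template) {

		TypedAggregation<Order> aggregation = newAggregation(Order.class, match(where("state").is("PAID")));

		// the returned iterator wraps the driver's AggregateIterable and must be closed
		try (CloseableIterator<Order> it = template.aggregateStream(aggregation, Order.class)) {
			it.forEachRemaining(order -> { /* consume results as they arrive */ });
		}
	}

	static class Order {
		String state;
	}
}
```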
@@ -557,10 +557,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -576,10 +576,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that
|
||||
* needs to be closed. The raw results will be mapped to the given entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling
|
||||
* explanation mode will throw an {@link IllegalArgumentException}.
|
||||
*
|
||||
@@ -702,10 +702,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -720,10 +720,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -768,10 +768,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -784,10 +784,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a List of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -881,7 +881,7 @@ public interface MongoOperations extends FluentMongoOperations {
}

/**
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@@ -897,7 +897,7 @@ public interface MongoOperations extends FluentMongoOperations {
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);

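A short usage sketch of the contract declared above (the `Person` type and its field names are invented):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.query.Update;

class FindAndModifySketch {

	static Person incrementVisits(MongoOperations template) {

		// atomically applies the update to the first document matching the query;
		// without options the entity *before* the modification is returned
		return template.findAndModify(query(where("name").is("alice")),
				new Update().inc("visits", 1), Person.class);
	}

	static class Person {
		String name;
		int visits;
	}
}
```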
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
|
||||
@@ -914,7 +914,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -934,7 +934,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
<T> T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);
|
||||
|
||||
/**
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify </a>
|
||||
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify <a/>
|
||||
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
|
||||
* {@link FindAndModifyOptions} into account.
|
||||
*
|
||||
@@ -957,7 +957,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document. <br />
|
||||
* The collection name is derived from the {@literal replacement} type. <br />
|
||||
@@ -977,7 +977,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
|
||||
* document.<br />
|
||||
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
|
||||
@@ -997,7 +997,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1018,7 +1018,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1041,7 +1041,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1066,7 +1066,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1094,7 +1094,7 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Triggers
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
|
||||
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
|
||||
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
|
||||
* taking {@link FindAndReplaceOptions} into account.<br />
|
||||
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
|
||||
@@ -1120,9 +1120,9 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
|
||||
* specified type. The first document that matches the query is returned and also removed from the collection in the
|
||||
* database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1137,10 +1137,10 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type. The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -1160,12 +1160,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(Class)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1182,12 +1176,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents.
|
||||
* @param collectionName must not be {@literal null} or empty.
|
||||
@@ -1196,35 +1184,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
*/
|
||||
long count(Query query, String collectionName);
|
||||
|
||||
/**
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
* based on collection statistics.
* <br />
* Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside
* transactions.
*
* @param entityClass must not be {@literal null}.
* @return the estimated number of documents.
* @since 3.1
*/
default long estimatedCount(Class<?> entityClass) {

Assert.notNull(entityClass, "Entity class must not be null!");
return estimatedCount(getCollectionName(entityClass));
}

/**
* Estimate the number of documents in the given collection based on collection statistics.
* <br />
* Please make sure to read the MongoDB reference documentation about limitations on e.g. sharded clusters or inside
* transactions.
*
* @param collectionName must not be {@literal null}.
* @return the estimated number of documents.
* @since 3.1
*/
long estimatedCount(String collectionName);

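Side by side, the exact count and the statistics-based estimate look like this (a sketch; `Person` is a placeholder type):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.MongoOperations;

class CountSketch {

	static void counts(MongoOperations template) {

		// exact count: aggregation-style countDocuments, shard/session/transaction safe
		long active = template.count(query(where("active").is(true)), Person.class);

		// fast but potentially inaccurate: collection statistics, only sensible for empty queries
		long roughly = template.estimatedCount(Person.class);
	}

	static class Person {
		boolean active;
	}
}
```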
/**
|
||||
* Returns the number of documents for the given {@link Query} by querying the given collection using the given entity
|
||||
* class to map the given {@link Query}. <br />
|
||||
@@ -1232,12 +1191,6 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
* influence on the resulting number of documents found as those values are passed on to the server and potentially
|
||||
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
|
||||
* count all matches.
|
||||
* <br />
|
||||
* This method uses an
|
||||
* {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
|
||||
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
|
||||
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
|
||||
* {@link #estimatedCount(String)} for empty queries instead.
|
||||
*
|
||||
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
|
||||
* {@literal null}.
|
||||
@@ -1249,39 +1202,34 @@ public interface MongoOperations extends FluentMongoOperations {

/**
* Insert the object into the collection for the entity type of the object to save.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
* Type Conversion"</a> for more details.
* <br />
* <p/>
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
* <br />
* The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the inserted object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
<T> T insert(T objectToSave);

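A two-line sketch of the insert/save distinction and the collection-like restriction mentioned above (`Person` is a placeholder type):

```java
import org.springframework.data.mongodb.core.MongoOperations;

class InsertVsSaveSketch {

	static void example(MongoOperations template) {

		Person person = new Person();
		template.insert(person); // initial store; fails if a document with the same _id already exists
		template.save(person);   // upsert: updates the document stored above

		// template.insert(java.util.Arrays.asList(person)); // would throw: the single-object
		//                                                    // overload rejects collection-like arguments
	}

	static class Person {
		String name;
	}
}
```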
/**
|
||||
* Insert the object into the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Insert is used to initially store the object into the database. To update an existing object use the save method.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the inserted object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T insert(T objectToSave, String collectionName);
|
||||
|
||||
@@ -1315,42 +1263,37 @@ public interface MongoOperations extends FluentMongoOperations {
|
||||
/**
|
||||
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
|
||||
* object is not already present, that is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
|
||||
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
|
||||
* Type Conversion"</a> for more details.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave);
|
||||
|
||||
/**
|
||||
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
|
||||
* is an 'upsert'.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
|
||||
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
|
||||
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
|
||||
* <br />
|
||||
* The {@code objectToSave} must not be collection-like.
|
||||
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
|
||||
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
|
||||
* Conversion"</a> for more details.
|
||||
*
|
||||
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
|
||||
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
|
||||
* @return the saved object.
|
||||
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
|
||||
*/
|
||||
<T> T save(T objectToSave, String collectionName);
|
||||
|
||||
|
||||
@@ -1,92 +0,0 @@
/*
* Copyright 2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mongodb.core;

import org.springframework.beans.factory.FactoryBean;
import org.springframework.lang.Nullable;
import org.springframework.util.ObjectUtils;

import com.mongodb.ServerApi;
import com.mongodb.ServerApi.Builder;
import com.mongodb.ServerApiVersion;

/**
* {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}.
*
* @author Christoph Strobl
* @since 3.3
*/
public class MongoServerApiFactoryBean implements FactoryBean<ServerApi> {

private String version;
private @Nullable Boolean deprecationErrors;
private @Nullable Boolean strict;

/**
* @param version the version string either as the enum name or the server version value.
* @see ServerApiVersion
*/
public void setVersion(String version) {
this.version = version;
}

/**
* @param deprecationErrors
* @see ServerApi.Builder#deprecationErrors(boolean)
*/
public void setDeprecationErrors(@Nullable Boolean deprecationErrors) {
this.deprecationErrors = deprecationErrors;
}

/**
* @param strict
* @see ServerApi.Builder#strict(boolean)
*/
public void setStrict(@Nullable Boolean strict) {
this.strict = strict;
}

@Nullable
@Override
public ServerApi getObject() throws Exception {

Builder builder = ServerApi.builder().version(version());

if (deprecationErrors != null) {
builder = builder.deprecationErrors(deprecationErrors);
}
if (strict != null) {
builder = builder.strict(strict);
}
return builder.build();
}

@Nullable
@Override
public Class<?> getObjectType() {
return ServerApi.class;
}

private ServerApiVersion version() {
try {
// lookup by name eg. 'V1'
return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version);
} catch (IllegalArgumentException e) {
// or just the version number, eg. just '1'
return ServerApiVersion.findByValue(version);
}
}
}
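A usage sketch for the factory bean above — both the enum-style `"V1"` and the plain `"1"` value resolve to `ServerApiVersion.V1` via the `version()` lookup shown in the class:

```java
import com.mongodb.ServerApi;

import org.springframework.data.mongodb.core.MongoServerApiFactoryBean;

class ServerApiFactoryBeanSketch {

	static ServerApi v1() throws Exception {

		MongoServerApiFactoryBean factoryBean = new MongoServerApiFactoryBean();
		factoryBean.setVersion("V1");           // or simply "1" - both resolve to ServerApiVersion.V1
		factoryBean.setStrict(true);            // reject commands outside the versioned API
		factoryBean.setDeprecationErrors(true); // fail on deprecated commands

		return factoryBean.getObject();
	}
}
```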
@@ -17,6 +17,11 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import static org.springframework.data.mongodb.core.query.SerializationUtils.*;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.RoundingMode;
|
||||
@@ -47,7 +52,6 @@ import org.springframework.data.geo.Distance;
|
||||
import org.springframework.data.geo.GeoResult;
|
||||
import org.springframework.data.geo.GeoResults;
|
||||
import org.springframework.data.geo.Metric;
|
||||
import org.springframework.data.mapping.MappingException;
|
||||
import org.springframework.data.mapping.callback.EntityCallbacks;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.MongoDatabaseFactory;
|
||||
@@ -56,7 +60,6 @@ import org.springframework.data.mongodb.SessionSynchronization;
|
||||
import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
|
||||
import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext;
|
||||
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
|
||||
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
|
||||
@@ -99,12 +102,12 @@ import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.core.timeseries.Granularity;
|
||||
import org.springframework.data.mongodb.core.validation.Validator;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
|
||||
import org.springframework.data.util.CloseableIterator;
|
||||
import org.springframework.data.util.Optionals;
|
||||
import org.springframework.jca.cci.core.ConnectionCallback;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ClassUtils;
|
||||
@@ -157,14 +160,22 @@ import com.mongodb.client.result.UpdateResult;
|
||||
* @author Cimon Lucas
|
||||
* @author Michael J. Simons
|
||||
* @author Roman Puchkovskiy
|
||||
* @author Yadhukrishna S Pai
|
||||
* @author Anton Barkan
|
||||
* @author Bartłomiej Mazur
|
||||
*/
|
||||
public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider {
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class);
|
||||
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
|
||||
private static final Collection<String> ITERABLE_CLASSES;
|
||||
|
||||
static {
|
||||
|
||||
Set<String> iterableClasses = new HashSet<>();
|
||||
iterableClasses.add(List.class.getName());
|
||||
iterableClasses.add(Collection.class.getName());
|
||||
iterableClasses.add(Iterator.class.getName());
|
||||
|
||||
ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses);
|
||||
}
|
||||
|
||||
private final MongoConverter mongoConverter;
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
@@ -338,7 +349,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
/**
|
||||
* Set the {@link EntityCallbacks} instance to use when invoking
|
||||
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* Overrides potentially existing {@link EntityCallbacks}.
|
||||
*
|
||||
* @param entityCallbacks must not be {@literal null}.
|
||||
@@ -598,7 +609,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class)
|
||||
*/
|
||||
public <T> MongoCollection<Document> createCollection(Class<T> entityClass) {
|
||||
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
|
||||
return createCollection(entityClass, CollectionOptions.empty());
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -707,17 +718,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return indexOps(collectionName, null);
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String)
|
||||
*/
|
||||
public IndexOperations indexOps(String collectionName, @Nullable Class<?> type) {
|
||||
return new DefaultIndexOperations(this, collectionName, type);
|
||||
public IndexOperations indexOps(String collectionName) {
|
||||
return new DefaultIndexOperations(this, collectionName, null);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -725,7 +731,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class)
|
||||
*/
|
||||
public IndexOperations indexOps(Class<?> entityClass) {
|
||||
return indexOps(getCollectionName(entityClass), entityClass);
|
||||
return new DefaultIndexOperations(this, getCollectionName(entityClass), entityClass);
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -757,6 +763,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper,
|
||||
eventPublisher, entityCallbacks));
|
||||
|
||||
operations.setExceptionTranslator(exceptionTranslator);
|
||||
operations.setDefaultWriteConcern(writeConcern);
|
||||
|
||||
return operations;
|
||||
@@ -974,7 +981,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
for (Document element : results) {
|
||||
|
||||
GeoResult<T> geoResult = callback.doWith(element);
|
||||
aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue()));
|
||||
aggregate = aggregate.add(new BigDecimal(geoResult.getDistance().getValue()));
|
||||
result.add(geoResult);
|
||||
}
|
||||
|
||||
@@ -1133,19 +1140,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#estimatedCount(java.lang.String)
|
||||
*/
|
||||
@Override
|
||||
public long estimatedCount(String collectionName) {
|
||||
return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions());
|
||||
}
|
||||
|
||||
protected long doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) {
|
||||
return execute(collectionName, collection -> collection.estimatedDocumentCount(options));
|
||||
}
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object)
|
||||
@@ -1174,28 +1168,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
return (T) doInsert(collectionName, objectToSave, this.mongoConverter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead.
|
||||
*/
|
||||
protected void ensureNotIterable(@Nullable Object source) {
|
||||
ensureNotCollectionLike(source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
|
||||
* {@link Iterator}.
|
||||
*
|
||||
* @param source can be {@literal null}.
|
||||
* @since 3.2.
|
||||
*/
|
||||
protected void ensureNotCollectionLike(@Nullable Object source) {
|
||||
|
||||
if (EntityOperations.isCollectionLike(source)) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
protected void ensureNotIterable(@Nullable Object o) {
|
||||
if (o != null) {
|
||||
if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) {
|
||||
throw new IllegalArgumentException("Cannot use a collection here.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1378,13 +1355,13 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
Assert.notNull(objectToSave, "Object to save must not be null!");
|
||||
Assert.hasText(collectionName, "Collection name must not be null or empty!");
|
||||
ensureNotCollectionLike(objectToSave);
|
||||
|
||||
AdaptibleEntity<T> source = operations.forEntity(objectToSave, mongoConverter.getConversionService());
|
||||
|
||||
return source.isVersionedEntity() //
|
||||
? doSaveVersioned(source, collectionName) //
|
||||
: (T) doSave(collectionName, objectToSave, this.mongoConverter);
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@@ -1992,7 +1969,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
|
||||
return aggregate(aggregation, inputCollectionName, outputType, null);
|
||||
AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
|
||||
mappingContext, queryMapper);
|
||||
return aggregate(aggregation, inputCollectionName, outputType, context);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -2002,7 +1981,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
public <O> AggregationResults<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {

return aggregate(aggregation, getCollectionName(inputType), outputType,
queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext());
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
}
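From a caller's perspective the input-type based overload is typically used like the sketch below (`Order` and `OrdersPerCustomer` are invented types; the pipeline uses the standard `Aggregation` static builders):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.query.Criteria;

class AggregateSketch {

	static AggregationResults<OrdersPerCustomer> ordersPerCustomer(MongoOperations template) {

		// the input type drives collection name resolution and field mapping,
		// the output type is what each result document is converted into
		return template.aggregate(
				newAggregation(match(Criteria.where("state").is("PAID")), group("customerId").count().as("total")),
				Order.class, OrdersPerCustomer.class);
	}

	static class Order {
		String customerId;
		String state;
	}

	static class OrdersPerCustomer {
		String id;
		long total;
	}
}
```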
|
||||
/* (non-Javadoc)
|
||||
@@ -2109,13 +2088,9 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
|
||||
return doAggregate(aggregation, collectionName, outputType,
|
||||
queryOperations.createAggregation(aggregation, context));
|
||||
}
|
||||
|
||||
private <O> AggregationResults<O> doAggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
|
||||
AggregationDefinition context) {
|
||||
return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext());
|
||||
AggregationOperationContext contextToUse = new AggregationUtil(queryMapper, mappingContext)
|
||||
.prepareAggregationContext(aggregation, context);
|
||||
return doAggregate(aggregation, collectionName, outputType, contextToUse);
|
||||
}
|
||||
|
||||
@SuppressWarnings("ConstantConditions")
|
||||
@@ -2166,7 +2141,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
options.getComment().ifPresent(aggregateIterable::comment);
|
||||
options.getHint().ifPresent(aggregateIterable::hint);
|
||||
|
||||
if (options.hasExecutionTimeLimit()) {
|
||||
aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS);
|
||||
@@ -2203,10 +2177,11 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
Assert.notNull(outputType, "Output type must not be null!");
|
||||
Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!");
|
||||
|
||||
AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context);
|
||||
AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
|
||||
AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context);
|
||||
|
||||
AggregationOptions options = aggregation.getOptions();
|
||||
List<Document> pipeline = aggregationDefinition.getAggregationPipeline();
|
||||
List<Document> pipeline = aggregationUtil.createPipeline(aggregation, rootContext);
|
||||
|
||||
if (LOGGER.isDebugEnabled()) {
|
||||
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
|
||||
@@ -2224,7 +2199,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
|
||||
options.getComment().ifPresent(cursor::comment);
|
||||
options.getHint().ifPresent(cursor::hint);
|
||||
|
||||
Class<?> domainType = aggregation instanceof TypedAggregation ? ((TypedAggregation) aggregation).getInputType()
|
||||
: null;
|
||||
@@ -2435,20 +2409,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
co.validationOptions(options);
}

if (collectionOptions.containsKey("timeseries")) {

Document timeSeries = collectionOptions.get("timeseries", Document.class);
com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(
timeSeries.getString("timeField"));
if (timeSeries.containsKey("metaField")) {
options.metaField(timeSeries.getString("metaField"));
}
if (timeSeries.containsKey("granularity")) {
options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase()));
}
co.timeSeriesOptions(options);
}

db.createCollection(collectionName, co);

MongoCollection<Document> coll = db.getCollection(collectionName, Document.class);
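The "timeseries" document consumed above originates from `CollectionOptions` on the calling side. A rough sketch — treat the exact `CollectionOptions.timeSeries(..)` factory name as an assumption, and `Measurement` is an invented type:

```java
import org.springframework.data.mongodb.core.CollectionOptions;
import org.springframework.data.mongodb.core.MongoOperations;

class TimeSeriesSketch {

	static void createMeasurementCollection(MongoOperations template) {

		// translated by the template into the "timeseries" document shown above
		// and ultimately into com.mongodb.client.model.TimeSeriesOptions
		template.createCollection(Measurement.class, CollectionOptions.timeSeries("timestamp"));
	}

	static class Measurement {
		java.time.Instant timestamp;
		double value;
	}
}
```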
@@ -2603,19 +2563,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() //
|
||||
.ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType))));
|
||||
|
||||
collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions)
|
||||
.ifPresent(it -> {
|
||||
|
||||
Document timeseries = new Document("timeField", it.getTimeField());
|
||||
if (StringUtils.hasText(it.getMetaField())) {
|
||||
timeseries.append("metaField", it.getMetaField());
|
||||
}
|
||||
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
|
||||
timeseries.append("granularity", it.getGranularity().name().toLowerCase());
|
||||
}
|
||||
doc.put("timeseries", timeseries);
|
||||
});
|
||||
}
|
||||
|
||||
return doc;
|
||||
@@ -2663,7 +2610,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
|
||||
* The first document that matches the query is returned and also removed from the collection in the database.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query document is specified as a standard Document and so is the fields specification.
|
||||
*
|
||||
* @param collectionName name of the collection to retrieve the objects from
|
||||
@@ -2779,24 +2726,25 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callbacks to do queries against the datastore that requires reading a single object from a
|
||||
* collection of objects. It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link Document}.</li>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link Document}.</li>
|
||||
* <li>Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.</li>
|
||||
* <ol>
|
||||
*
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link Document} with
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
@Nullable
|
||||
private <T> T executeFindOneInternal(CollectionCallback<Document> collectionCallback,
|
||||
DocumentCallback<T> documentCallback, String collectionName) {
|
||||
DocumentCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName));
|
||||
return document != null ? documentCallback.doWith(document) : null;
|
||||
T result = objectCallback
|
||||
.doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)));
|
||||
return result;
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
}
|
||||
@@ -2806,7 +2754,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* Internal method using callback to do queries against the datastore that requires reading a collection of objects.
|
||||
* It will take the following steps
|
||||
* <ol>
|
||||
* <li>Execute the given {@link CollectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Execute the given {@link ConnectionCallback} for a {@link FindIterable}.</li>
|
||||
* <li>Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if
|
||||
* {@link CursorPreparer} is {@literal null}</li>
|
||||
* <li>Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the
|
||||
@@ -2816,27 +2764,36 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param <T>
|
||||
* @param collectionCallback the callback to retrieve the {@link FindIterable} with
|
||||
* @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it
|
||||
* @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type
|
||||
* @param collectionName the collection to be queried
|
||||
* @return
|
||||
*/
|
||||
private <T> List<T> executeFindMultiInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallback<T> documentCallback, String collectionName) {
|
||||
CursorPreparer preparer, DocumentCallback<T> objectCallback, String collectionName) {
|
||||
|
||||
try {
|
||||
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
|
||||
List<T> result = new ArrayList<>();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
Document object = cursor.next();
|
||||
result.add(documentCallback.doWith(object));
|
||||
result.add(objectCallback.doWith(object));
|
||||
}
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
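Both variants of executeFindMultiInternal above close the cursor, either via try-with-resources or an explicit finally block; MongoCursor implements Closeable, so the shorter form is sufficient. A compact sketch of that pattern, with collection being a MongoCollection<Document>:

    try (MongoCursor<Document> cursor = collection.find().iterator()) {
        while (cursor.hasNext()) {
            Document next = cursor.next();
            // convert/collect the document here
        }
    } // cursor.close() runs even if conversion throws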
@@ -2846,12 +2803,23 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private void executeQueryInternal(CollectionCallback<FindIterable<Document>> collectionCallback,
|
||||
CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) {
|
||||
|
||||
try (MongoCursor<Document> cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator()) {
|
||||
try {
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
MongoCursor<Document> cursor = null;
|
||||
|
||||
try {
|
||||
|
||||
cursor = preparer
|
||||
.initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection)
|
||||
.iterator();
|
||||
|
||||
while (cursor.hasNext()) {
|
||||
callbackHandler.processDocument(cursor.next());
|
||||
}
|
||||
} finally {
|
||||
if (cursor != null) {
|
||||
cursor.close();
|
||||
}
|
||||
}
|
||||
} catch (RuntimeException e) {
|
||||
throw potentiallyConvertRuntimeException(e, exceptionTranslator);
|
||||
@@ -2993,17 +2961,12 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
private class ExistsCallback implements CollectionCallback<Boolean> {
|
||||
|
||||
private final Document mappedQuery;
|
||||
private final com.mongodb.client.model.Collation collation;
|
||||
|
||||
ExistsCallback(Document mappedQuery, com.mongodb.client.model.Collation collation) {
|
||||
|
||||
this.mappedQuery = mappedQuery;
|
||||
this.collation = collation;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Boolean doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
|
||||
|
||||
@@ -3025,7 +2988,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final Document sort;
|
||||
private final Optional<Collation> collation;
|
||||
|
||||
FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
|
||||
public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) {
|
||||
|
||||
this.query = query;
|
||||
this.fields = fields;
|
||||
@@ -3051,9 +3014,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final List<Document> arrayFilters;
|
||||
private final FindAndModifyOptions options;
|
||||
|
||||
FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List<Document> arrayFilters,
|
||||
FindAndModifyOptions options) {
|
||||
|
||||
public FindAndModifyCallback(Document query, Document fields, Document sort, Object update,
|
||||
List<Document> arrayFilters, FindAndModifyOptions options) {
|
||||
this.query = query;
|
||||
this.fields = fields;
|
||||
this.sort = sort;
|
||||
@@ -3150,7 +3112,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
interface DocumentCallback<T> {
|
||||
|
||||
T doWith(Document object);
|
||||
@Nullable
|
||||
T doWith(@Nullable Document object);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -3161,32 +3124,29 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @author Christoph Strobl
|
||||
* @author Roman Puchkovskiy
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
private class ReadDocumentCallback<T> implements DocumentCallback<T> {
|
||||
|
||||
private final EntityReader<? super T, Bson> reader;
|
||||
private final Class<T> type;
|
||||
private final @NonNull EntityReader<? super T, Bson> reader;
|
||||
private final @NonNull Class<T> type;
|
||||
private final String collectionName;
|
||||
|
||||
ReadDocumentCallback(EntityReader<? super T, Bson> reader, Class<T> type, String collectionName) {
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
this.reader = reader;
|
||||
this.type = type;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
T source = null;
|
||||
|
||||
public T doWith(Document document) {
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
T entity = reader.read(type, document);
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
if (document != null) {
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));
|
||||
source = reader.read(type, document);
|
||||
}
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
|
||||
entity = maybeCallAfterConvert(entity, document, collectionName);
|
||||
if (source != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
|
||||
source = maybeCallAfterConvert(source, document, collectionName);
|
||||
}
|
||||
|
||||
return entity;
|
||||
return source;
|
||||
}
|
||||
}
|
||||
|
||||
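The ReadDocumentCallback above emits an AfterLoadEvent for the raw document, converts it, then emits an AfterConvertEvent and invokes the entity-callback hook. A minimal listener sketch that observes both events when registered as a Spring bean; the Person type is illustrative:

    class PersonReadListener extends AbstractMongoEventListener<Person> {

        @Override
        public void onAfterLoad(AfterLoadEvent<Person> event) {
            // raw Document as read from the collection, before conversion
            System.out.println("loaded: " + event.getDocument());
        }

        @Override
        public void onAfterConvert(AfterConvertEvent<Person> event) {
            // converted domain object
            System.out.println("converted: " + event.getSource());
        }
    }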
@@ -3198,28 +3158,21 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @param <T>
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
private class ProjectingReadCallback<S, T> implements DocumentCallback<T> {
|
||||
|
||||
private final EntityReader<Object, Bson> reader;
|
||||
private final Class<S> entityType;
|
||||
private final Class<T> targetType;
|
||||
private final String collectionName;
|
||||
|
||||
ProjectingReadCallback(EntityReader<Object, Bson> reader, Class<S> entityType, Class<T> targetType,
|
||||
String collectionName) {
|
||||
|
||||
this.reader = reader;
|
||||
this.entityType = entityType;
|
||||
this.targetType = targetType;
|
||||
this.collectionName = collectionName;
|
||||
}
|
||||
private final @NonNull EntityReader<Object, Bson> reader;
|
||||
private final @NonNull Class<S> entityType;
|
||||
private final @NonNull Class<T> targetType;
|
||||
private final @NonNull String collectionName;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public T doWith(Document document) {
|
||||
@Nullable
|
||||
public T doWith(@Nullable Document document) {
|
||||
|
||||
if (document == null) {
|
||||
return null;
|
||||
@@ -3230,16 +3183,15 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName));
|
||||
|
||||
Object entity = reader.read(typeToRead, document);
|
||||
Object source = reader.read(typeToRead, document);
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;
|
||||
|
||||
if (entity == null) {
|
||||
throw new MappingException(String.format("EntityReader %s returned null", reader));
|
||||
if (result != null) {
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
result = maybeCallAfterConvert(result, document, collectionName);
|
||||
}
|
||||
|
||||
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity;
|
||||
|
||||
maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName));
|
||||
return (T) maybeCallAfterConvert(result, document, collectionName);
|
||||
return (T) result;
|
||||
}
|
||||
}
|
||||
|
||||
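ProjectingReadCallback only creates a projection proxy when the requested target type is an interface; DTO targets are read directly. A sketch of triggering that path through the fluent API, with NamesOnly as a hypothetical closed projection:

    interface NamesOnly {
        String getFirstname();
        String getLastname();
    }

    List<NamesOnly> names = template.query(Person.class)
            .as(NamesOnly.class) // interface target -> projectionFactory.createProjection(...)
            .matching(Query.query(Criteria.where("lastname").is("Matthews")))
            .all();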
@@ -3248,7 +3200,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
private final Query query;
|
||||
private final @Nullable Class<?> type;
|
||||
|
||||
QueryCursorPreparer(Query query, @Nullable Class<?> type) {
|
||||
public QueryCursorPreparer(Query query, @Nullable Class<?> type) {
|
||||
|
||||
this.query = query;
|
||||
this.type = type;
|
||||
@@ -3309,10 +3261,6 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
|
||||
}
|
||||
|
||||
if (meta.getAllowDiskUse() != null) {
|
||||
cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse());
|
||||
}
|
||||
|
||||
for (Meta.CursorOption option : meta.getFlags()) {
|
||||
|
||||
switch (option) {
|
||||
@@ -3376,7 +3324,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
this.metric = metric;
|
||||
}
|
||||
|
||||
public GeoResult<T> doWith(Document object) {
|
||||
@Nullable
|
||||
public GeoResult<T> doWith(@Nullable Document object) {
|
||||
|
||||
double distance = Double.NaN;
|
||||
if (object.containsKey(distanceField)) {
|
||||
@@ -3395,6 +3344,7 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
* @author Thomas Darimont
|
||||
* @since 1.7
|
||||
*/
|
||||
@AllArgsConstructor(access = AccessLevel.PACKAGE)
|
||||
static class CloseableIterableCursorAdapter<T> implements CloseableIterator<T> {
|
||||
|
||||
private volatile @Nullable MongoCursor<Document> cursor;
|
||||
@@ -3403,23 +3353,19 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
/**
|
||||
* Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}.
|
||||
*
|
||||
* @param cursor
|
||||
* @param exceptionTranslator
|
||||
* @param objectReadCallback
|
||||
*/
|
||||
CloseableIterableCursorAdapter(MongoIterable<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DocumentCallback<T> objectReadCallback) {
|
||||
public CloseableIterableCursorAdapter(MongoIterable<Document> cursor,
|
||||
PersistenceExceptionTranslator exceptionTranslator, DocumentCallback<T> objectReadCallback) {
|
||||
|
||||
this.cursor = cursor.iterator();
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.objectReadCallback = objectReadCallback;
|
||||
}
|
||||
|
||||
CloseableIterableCursorAdapter(MongoCursor<Document> cursor, PersistenceExceptionTranslator exceptionTranslator,
|
||||
DocumentCallback<T> objectReadCallback) {
|
||||
|
||||
this.cursor = cursor;
|
||||
this.exceptionTranslator = exceptionTranslator;
|
||||
this.objectReadCallback = objectReadCallback;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
|
||||
@@ -3446,7 +3392,8 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
|
||||
try {
|
||||
Document item = cursor.next();
|
||||
return objectReadCallback.doWith(item);
|
||||
T converted = objectReadCallback.doWith(item);
|
||||
return converted;
|
||||
} catch (RuntimeException ex) {
|
||||
throw potentiallyConvertRuntimeException(ex, exceptionTranslator);
|
||||
}
|
||||
@@ -3472,27 +3419,14 @@ public class MongoTemplate implements MongoOperations, ApplicationContextAware,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated since 3.1.4. Use {@link #getMongoDatabaseFactory()} instead.
|
||||
* @return the {@link MongoDatabaseFactory} in use.
|
||||
*/
|
||||
@Deprecated
|
||||
public MongoDatabaseFactory getMongoDbFactory() {
|
||||
return getMongoDatabaseFactory();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the {@link MongoDatabaseFactory} in use.
|
||||
* @since 3.1.4
|
||||
*/
|
||||
public MongoDatabaseFactory getMongoDatabaseFactory() {
|
||||
return mongoDbFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
|
||||
* server through the driver API.
|
||||
* <br />
|
||||
* <p />
|
||||
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired
|
||||
* target method matching the actual arguments plus a {@link ClientSession}.
|
||||
*
|
||||
|
||||
@@ -15,6 +15,9 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mapping.SimplePropertyHandler;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
@@ -30,14 +33,11 @@ import org.springframework.util.ClassUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
|
||||
class PropertyOperations {
|
||||
|
||||
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
|
||||
|
||||
PropertyOperations(MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
|
||||
this.mappingContext = mappingContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for
|
||||
* creating the projection (target) type if the {@code targetType} is a {@literal DTO projection} or a
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
@@ -32,17 +31,11 @@ import org.springframework.data.mapping.PropertyPath;
|
||||
import org.springframework.data.mapping.PropertyReferenceException;
|
||||
import org.springframework.data.mapping.context.MappingContext;
|
||||
import org.springframework.data.mongodb.CodecRegistryProvider;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
|
||||
import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
|
||||
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
|
||||
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
|
||||
import org.springframework.data.mongodb.core.convert.QueryMapper;
|
||||
import org.springframework.data.mongodb.core.convert.UpdateMapper;
|
||||
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
|
||||
@@ -55,7 +48,6 @@ import org.springframework.data.mongodb.core.query.UpdateDefinition;
|
||||
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
|
||||
import org.springframework.data.mongodb.util.BsonUtils;
|
||||
import org.springframework.data.projection.ProjectionFactory;
|
||||
import org.springframework.data.util.Lazy;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.ClassUtils;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
@@ -202,34 +194,9 @@ class QueryOperations {
|
||||
return new DeleteContext(query, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
return new AggregationDefinition(aggregation, inputType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link AggregationDefinition} for the given {@link Aggregation}.
|
||||
*
|
||||
* @param aggregation must not be {@literal null}.
|
||||
* @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}.
|
||||
* @return new instance of {@link AggregationDefinition}.
|
||||
* @since 3.2
|
||||
*/
|
||||
AggregationDefinition createAggregation(Aggregation aggregation,
|
||||
@Nullable AggregationOperationContext aggregationOperationContext) {
|
||||
return new AggregationDefinition(aggregation, aggregationOperationContext);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document
|
||||
* representation, mapping field names, as well as determining and applying {@link Collation collations}.
|
||||
* representation, mapping fieldnames, as well as determinging and applying {@link Collation collations}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
@@ -238,7 +205,7 @@ class QueryOperations {
|
||||
private final Query query;
|
||||
|
||||
/**
|
||||
* Create new a {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a
|
||||
* Create new a {@link QueryContext} instance from the given {@literal query} (can be eihter a {@link Query} or a
|
||||
* plain {@link Document}.
|
||||
*
|
||||
* @param query can be {@literal null}.
|
||||
@@ -291,21 +258,7 @@ class QueryOperations {
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
ProjectionFactory projectionFactory) {

Document fields = new Document();

for (Entry<String, Object> entry : query.getFieldsObject().entrySet()) {

if (entry.getValue() instanceof MongoExpression) {

AggregationOperationContext ctx = entity == null ? Aggregation.DEFAULT_CONTEXT
: new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper);

fields.put(entry.getKey(), AggregationExpression.from((MongoExpression) entry.getValue()).toDocument(ctx));
} else {
fields.put(entry.getKey(), entry.getValue());
}
}

Document fields = query.getFieldsObject();
Document mappedFields = fields;

if (entity == null) {
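The branch added above lets a field projection carry a MongoExpression that is rendered through the aggregation framework before being sent to the server. A sketch of supplying such a projection via a BasicQuery fields document, assuming the MongoExpression.create(...) factory; the Order type, field names and tax factor are illustrative:

    Document fields = new Document("priceWithTax",
            MongoExpression.create("{ '$multiply' : [ '$price', 1.19 ] }"));

    BasicQuery query = new BasicQuery(new Document(), fields);
    List<Order> orders = template.find(query, Order.class);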
@@ -322,7 +275,7 @@ class QueryOperations {
|
||||
mappingContext.getRequiredPersistentEntity(targetType));
|
||||
}
|
||||
|
||||
if (entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
if (entity != null && entity.hasTextScoreProperty() && !query.getQueryObject().containsKey("$text")) {
|
||||
mappedFields.remove(entity.getTextScoreProperty().getFieldName());
|
||||
}
|
||||
|
||||
@@ -388,8 +341,7 @@ class QueryOperations {
|
||||
}
|
||||
|
||||
@Override
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType,
|
||||
ProjectionFactory projectionFactory) {
|
||||
Document getMappedFields(@Nullable MongoPersistentEntity<?> entity, Class<?> targetType, ProjectionFactory projectionFactory) {
|
||||
return getMappedFields(entity);
|
||||
}
|
||||
|
||||
@@ -613,7 +565,7 @@ class QueryOperations {
|
||||
|
||||
UpdateContext(MappedDocument update, boolean upsert) {
|
||||
|
||||
super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())));
|
||||
super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter()))));
|
||||
this.multi = false;
|
||||
this.upsert = upsert;
|
||||
this.mappedDocument = update;
|
||||
@@ -756,10 +708,10 @@ class QueryOperations {
*/
List<Document> getUpdatePipeline(@Nullable Class<?> domainType) {

Class<?> type = domainType != null ? domainType : Object.class;
AggregationOperationContext context = domainType != null
? new RelaxedTypeBasedAggregationOperationContext(domainType, mappingContext, queryMapper)
: Aggregation.DEFAULT_CONTEXT;

AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext,
queryMapper);
return aggregationUtil.createPipeline((AggregationUpdate) update, context);
}
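getUpdatePipeline maps an AggregationUpdate into plain pipeline stages against the (relaxed) type context shown above. A sketch of issuing such an update, assuming the fluent set(...).toValue(...)/unset(...) API of AggregationUpdate; Student and the field names are made up:

    AggregationUpdate update = AggregationUpdate.update()
            .set("status").toValue("active")
            .unset("temporaryFlag");

    template.update(Student.class)
            .matching(Query.query(Criteria.where("score").gte(60)))
            .apply(update)
            .all();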
@@ -809,105 +761,4 @@ class QueryOperations {
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A value object that encapsulates common tasks required when running {@literal aggregations}.
|
||||
*
|
||||
* @since 3.2
|
||||
*/
|
||||
class AggregationDefinition {
|
||||
|
||||
private final Aggregation aggregation;
|
||||
private final Lazy<AggregationOperationContext> aggregationOperationContext;
|
||||
private final Lazy<List<Document>> pipeline;
|
||||
private final @Nullable Class<?> inputType;
|
||||
|
||||
/**
|
||||
* Creates new instance of {@link AggregationDefinition} extracting the input type from either the
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
|
||||
* the given {@literal aggregationOperationContext} if present. <br />
|
||||
* Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and
|
||||
* the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
|
||||
* Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
|
||||
*
|
||||
* @param aggregation the source aggregation.
|
||||
* @param aggregationOperationContext can be {@literal null}.
|
||||
*/
|
||||
AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) {
|
||||
|
||||
this.aggregation = aggregation;
|
||||
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
} else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext) {
|
||||
this.inputType = ((TypeBasedAggregationOperationContext) aggregationOperationContext).getType();
|
||||
} else {
|
||||
this.inputType = null;
|
||||
}
|
||||
|
||||
this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext
|
||||
: aggregationUtil.createAggregationContext(aggregation, getInputType()));
|
||||
this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new instance of {@link AggregationDefinition} extracting the input type from either the
|
||||
* {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or
|
||||
* the given {@literal aggregationOperationContext} if present. <br />
|
||||
* Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired
|
||||
* {@link AggregationOptions#getDomainTypeMapping() domain type mapping}. <br />
|
||||
* Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse.
|
||||
*
|
||||
* @param aggregation the source aggregation.
|
||||
* @param inputType can be {@literal null}.
|
||||
*/
|
||||
AggregationDefinition(Aggregation aggregation, @Nullable Class<?> inputType) {
|
||||
|
||||
this.aggregation = aggregation;
|
||||
|
||||
if (aggregation instanceof TypedAggregation) {
|
||||
this.inputType = ((TypedAggregation<?>) aggregation).getInputType();
|
||||
} else {
|
||||
this.inputType = inputType;
|
||||
}
|
||||
|
||||
this.aggregationOperationContext = Lazy
|
||||
.of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType()));
|
||||
this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the already mapped pipeline.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
List<Document> getAggregationPipeline() {
|
||||
return pipeline.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}.
|
||||
* @see AggregationPipeline#isOutOrMerge()
|
||||
*/
|
||||
boolean isOutOrMerge() {
|
||||
return aggregation.getPipeline().isOutOrMerge();
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the {@link AggregationOperationContext} used for mapping the pipeline.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
AggregationOperationContext getAggregationOperationContext() {
|
||||
return aggregationOperationContext.get();
|
||||
}
|
||||
|
||||
/**
* @return the input type to map the pipeline against. Can be {@literal null}.
*/
@Nullable
Class<?> getInputType() {
return inputType;
}
}
}
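AggregationDefinition defers both context creation and pipeline mapping through Lazy, so the pipeline is rendered on first access of getAggregationPipeline() and then cached, exactly as its Javadoc states. A tiny sketch of that utility; mapPipeline() stands in for the expensive mapping step:

    Lazy<List<Document>> pipeline = Lazy.of(() -> mapPipeline()); // nothing evaluated yet

    List<Document> first = pipeline.get();  // mapPipeline() runs here
    List<Document> second = pipeline.get(); // cached result, no second evaluation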
@@ -15,6 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.aggregation.Aggregation;
|
||||
@@ -58,22 +62,15 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio
|
||||
return new ReactiveAggregationSupport<>(template, domainType, null, null);
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ReactiveAggregationSupport<T>
|
||||
implements AggregationOperationWithAggregation<T>, ReactiveAggregation<T>, TerminatingAggregationOperation<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final Aggregation aggregation;
|
||||
private final String collection;
|
||||
|
||||
ReactiveAggregationSupport(ReactiveMongoTemplate template, Class<T> domainType, Aggregation aggregation,
|
||||
String collection) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.aggregation = aggregation;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull ReactiveMongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
Aggregation aggregation;
|
||||
String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -71,7 +71,7 @@ public interface ReactiveChangeStreamOperation {
|
||||
/**
|
||||
* Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription}
|
||||
* is {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped.
|
||||
*/
|
||||
Flux<ChangeStreamEvent<T>> listen();
|
||||
|
||||
@@ -91,10 +91,10 @@ public interface ReactiveFindOperation {
|
||||
* Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will
|
||||
* not be completed unless the {@link org.reactivestreams.Subscription} is
|
||||
* {@link org.reactivestreams.Subscription#cancel() canceled}.
|
||||
* <br />
|
||||
* <p />
|
||||
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the
|
||||
* document at the "end" of the collection and then the application deletes that document.
|
||||
* <br />
|
||||
* <p />
|
||||
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the
|
||||
* streams will linger and exhaust resources. <br/>
|
||||
* <strong>NOTE:</strong> Requires a capped collection.
|
||||
@@ -106,12 +106,6 @@ public interface ReactiveFindOperation {

/**
* Get the number of matching elements.
* <br />
* This method uses an
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but
* guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications
* needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead.
*
* @return {@link Mono} emitting total number of matching elements. Never {@literal null}.
*/
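The Javadoc above contrasts the exact, countDocuments-backed count with the metadata-based estimate that skips query mapping entirely. A sketch of both calls on a reactive template; Person and the people collection are illustrative:

    Mono<Long> exact = template.count(new Query(), Person.class); // countDocuments: shard/session/transaction safe
    Mono<Long> estimate = template.estimatedCount("people");      // metadata-based, only sensible for empty queries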
@@ -15,10 +15,15 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.Document;
|
||||
|
||||
import org.springframework.dao.IncorrectResultSizeDataAccessException;
|
||||
import org.springframework.data.mongodb.core.query.NearQuery;
|
||||
import org.springframework.data.mongodb.core.query.Query;
|
||||
@@ -34,15 +39,12 @@ import org.springframework.util.StringUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
ReactiveFindOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull ReactiveMongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -62,24 +64,16 @@ class ReactiveFindOperationSupport implements ReactiveFindOperation {
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ReactiveFindSupport<T>
|
||||
implements ReactiveFind<T>, FindWithCollection<T>, FindWithProjection<T>, FindWithQuery<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<?> domainType;
|
||||
private final Class<T> returnType;
|
||||
private final String collection;
|
||||
private final Query query;
|
||||
|
||||
ReactiveFindSupport(ReactiveMongoTemplate template, Class<?> domainType, Class<T> returnType,
|
||||
String collection, Query query) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.returnType = returnType;
|
||||
this.collection = collection;
|
||||
this.query = query;
|
||||
}
|
||||
@NonNull ReactiveMongoTemplate template;
|
||||
@NonNull Class<?> domainType;
|
||||
Class<T> returnType;
|
||||
String collection;
|
||||
Query query;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,10 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
@@ -30,13 +34,10 @@ import org.springframework.util.StringUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.0
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ReactiveInsertOperationSupport implements ReactiveInsertOperation {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
ReactiveInsertOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull ReactiveMongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -50,18 +51,13 @@ class ReactiveInsertOperationSupport implements ReactiveInsertOperation {
|
||||
return new ReactiveInsertSupport<>(template, domainType, null);
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
|
||||
static class ReactiveInsertSupport<T> implements ReactiveInsert<T> {
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
private final Class<T> domainType;
|
||||
private final String collection;
|
||||
|
||||
ReactiveInsertSupport(ReactiveMongoTemplate template, Class<T> domainType, String collection) {
|
||||
|
||||
this.template = template;
|
||||
this.domainType = domainType;
|
||||
this.collection = collection;
|
||||
}
|
||||
@NonNull ReactiveMongoTemplate template;
|
||||
@NonNull Class<T> domainType;
|
||||
String collection;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.NonNull;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
|
||||
@@ -29,15 +31,12 @@ import org.springframework.util.StringUtils;
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
@RequiredArgsConstructor
|
||||
class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
|
||||
|
||||
private static final Query ALL_QUERY = new Query();
|
||||
|
||||
private final ReactiveMongoTemplate template;
|
||||
|
||||
ReactiveMapReduceOperationSupport(ReactiveMongoTemplate template) {
|
||||
this.template = template;
|
||||
}
|
||||
private final @NonNull ReactiveMongoTemplate template;
|
||||
|
||||
/*
|
||||
* (non-Javascript)
|
||||
|
||||
@@ -15,15 +15,11 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
import org.springframework.util.Assert;
|
||||
import reactor.core.publisher.Mono;
|
||||
import reactor.util.context.Context;
|
||||
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.reactivestreams.Publisher;
|
||||
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
import com.mongodb.reactivestreams.client.ClientSession;
|
||||
|
||||
/**
|
||||
@@ -33,7 +29,7 @@ import com.mongodb.reactivestreams.client.ClientSession;
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 2.1
|
||||
* @see Mono#deferContextual(Function)
|
||||
* @see Mono#subscriberContext()
|
||||
* @see Context
|
||||
*/
|
||||
public class ReactiveMongoContext {
|
||||
@@ -50,14 +46,8 @@ public class ReactiveMongoContext {
*/
public static Mono<ClientSession> getSession() {

return Mono.deferContextual(ctx -> {

if (ctx.hasKey(SESSION_KEY)) {
return ctx.<Mono<ClientSession>> get(SESSION_KEY);
}

return Mono.empty();
});
return Mono.subscriberContext().filter(ctx -> ctx.hasKey(SESSION_KEY))
.flatMap(ctx -> ctx.<Mono<ClientSession>> get(SESSION_KEY));
}

/**
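The hunk above replaces the deprecated Mono.subscriberContext() lookup with Mono.deferContextual while keeping the contract: an empty Mono when no ClientSession is bound. A sketch of consuming it from downstream code, purely illustrative:

    Mono<Boolean> inSession = ReactiveMongoContext.getSession()
            .map(session -> true)
            .defaultIfEmpty(false); // empty when no ClientSession is bound to the Reactor context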
@@ -59,7 +59,7 @@ import com.mongodb.reactivestreams.client.MongoCollection;
|
||||
* Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability
|
||||
* (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using
|
||||
* {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>NOTE:</strong> Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB
|
||||
* specific documentation to learn more about <a href="https://docs.mongodb.com/manual/core/transactions/">Multi
|
||||
* Document Transactions</a>.
|
||||
@@ -121,7 +121,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not
|
||||
@@ -133,7 +133,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param entityClass class that determines the collection to use. Must not be {@literal null}.
|
||||
@@ -145,7 +145,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Allows for returning a result object, that is a domain object or a collection of domain objects.
|
||||
*
|
||||
* @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be
|
||||
@@ -159,7 +159,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession}
|
||||
* provided by the given {@link Supplier} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -178,7 +178,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession}
|
||||
* with given {@literal sessionOptions} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -192,7 +192,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the
|
||||
* {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle. Use
|
||||
* {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the
|
||||
* {@link ClientSession} when done.
|
||||
@@ -205,7 +205,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
|
||||
* <br />
|
||||
* <p />
|
||||
* <strong>Note:</strong> It is up to the caller to manage the {@link ClientSession} lifecycle.
|
||||
*
|
||||
* @param session must not be {@literal null}.
|
||||
@@ -218,7 +218,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of
|
||||
* {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command
|
||||
* issued against MongoDB.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
@@ -233,7 +233,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and
|
||||
* bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against
|
||||
* MongoDB.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction
|
||||
* that is {@link ClientSession#commitTransaction() committed} on success. Transactions are
|
||||
* {@link ClientSession#abortTransaction() rolled back} upon errors.
|
||||
@@ -293,7 +293,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
* created on first interaction with the server. Collections can be explicitly created via
|
||||
* {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class)
|
||||
* exists} first.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection.
|
||||
@@ -303,7 +303,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a name indicated by the entity class exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the name of the collection. Must not be {@literal null}.
|
||||
@@ -313,7 +313,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Check to see if a collection with a given name exists.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection. Must not be {@literal null}.
|
||||
@@ -323,7 +323,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the name indicated by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}.
|
||||
@@ -332,7 +332,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Drop the collection with the given name.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Translate any exceptions as necessary.
|
||||
*
|
||||
* @param collectionName name of the collection to drop/delete.
|
||||
@@ -341,10 +341,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a {@link Flux} of objects of type T from the collection used by the entity class.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -355,10 +355,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Query for a {@link Flux} of objects of type T from the specified collection.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way
|
||||
* to map objects since the test for class type is done in the client and not on the server.
|
||||
*
|
||||
@@ -371,10 +371,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the
|
||||
* specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -388,10 +388,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
|
||||
* type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -435,10 +435,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -451,10 +451,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
|
||||
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
|
||||
* feature rich {@link Query}.
|
||||
*
|
||||
@@ -566,10 +566,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given entity class and are returned as stream. The name of the
|
||||
* inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
|
||||
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
|
||||
* {@link IllegalArgumentException}.
|
||||
@@ -584,10 +584,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the
|
||||
* {@code inputType}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
|
||||
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
|
||||
* {@link IllegalArgumentException}.
|
||||
@@ -604,9 +604,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
|
||||
|
||||
/**
|
||||
* Execute an aggregation operation.
|
||||
* <br />
|
||||
* <p/>
|
||||
* The raw results will be mapped to the given entity class.
|
||||
* <br />
* <p/>
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with
* {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause
* {@link IllegalArgumentException}.
@@ -676,7 +676,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
<T> Flux<GeoResult<T>> geoNear(NearQuery near, Class<T> entityClass, String collectionName);

/**
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@@ -691,7 +691,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass);

/**
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}.
*
* @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional
@@ -707,7 +707,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, Class<T> entityClass, String collectionName);

/**
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
@@ -725,7 +725,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
<T> Mono<T> findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class<T> entityClass);

/**
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify</a>
* Triggers <a href="https://docs.mongodb.org/manual/reference/method/db.collection.findAndModify/">findAndModify<a/>
* to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking
* {@link FindAndModifyOptions} into account.
*
@@ -746,7 +746,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
* document. <br />
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
@@ -764,7 +764,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement}
* document. <br />
* Options are defaulted to {@link FindAndReplaceOptions#empty()}. <br />
@@ -783,7 +783,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
@@ -803,7 +803,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
@@ -825,7 +825,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
@@ -849,7 +849,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
@@ -876,7 +876,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Triggers
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace</a>
* <a href="https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndReplace/">findOneAndReplace<a/>
* to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document
* taking {@link FindAndReplaceOptions} into account. <br />
* <strong>NOTE:</strong> The replacement entity must not hold an {@literal id}.
@@ -902,9 +902,9 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the
* specified type. The first document that matches the query is returned and also removed from the collection in the
* database.
* <br />
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
* <br />
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -918,10 +918,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified
* type. The first document that matches the query is returned and also removed from the collection in the database.
* <br />
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -940,12 +940,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
* <br />
* This method uses an
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
* {@link #estimatedCount(Class)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
@@ -962,12 +956,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
* <br />
* This method uses an
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
* {@link #estimatedCount(String)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents.
* @param collectionName must not be {@literal null} or empty.
@@ -983,12 +971,6 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* influence on the resulting number of documents found as those values are passed on to the server and potentially
* limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to
* count all matches.
* <br />
* This method uses an
* {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions)
* aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees
* shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use
* {@link #estimatedCount(String)} for empty queries instead.
*
* @param query the {@link Query} class that specifies the criteria used to find documents. Must not be
* {@literal null}.
@@ -998,70 +980,36 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
*/
Mono<Long> count(Query query, @Nullable Class<?> entityClass, String collectionName);

/**
* Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type},
* based on collection statistics.
* <br />
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
* transactions.
*
* @param entityClass must not be {@literal null}.
* @return a {@link Mono} emitting the estimated number of documents.
* @since 3.1
*/
default Mono<Long> estimatedCount(Class<?> entityClass) {

Assert.notNull(entityClass, "Entity class must not be null!");
return estimatedCount(getCollectionName(entityClass));
}

/**
* Estimate the number of documents in the given collection based on collection statistics.
* <br />
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside
* transactions.
*
* @param collectionName must not be {@literal null}.
* @return a {@link Mono} emitting the estimated number of documents.
* @since 3.1
*/
Mono<Long> estimatedCount(String collectionName);
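The hunk above introduces `estimatedCount(Class)` / `estimatedCount(String)` next to the existing `count` overloads. A minimal usage sketch of the difference (the `Person` entity and the injected template are illustrative assumptions, not part of this change set):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Mono;

class CountExamples {

	// Illustrative mapped document type.
	static class Person {
		String id;
		String lastname;
	}

	Mono<Long> exactMatches(ReactiveMongoTemplate template) {
		// countDocuments-based: accurate and transaction/shard compliant, but filter-driven
		return template.count(query(where("lastname").is("Matthews")), Person.class);
	}

	Mono<Long> roughSize(ReactiveMongoTemplate template) {
		// collection-statistics based: fast, but only an estimate (new in this diff)
		return template.estimatedCount(Person.class);
	}
}
```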

/**
* Insert the object into the collection for the entity type of the object to save.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
* Type Conversion"</a> for more details.
* <br />
* <p/>
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
* <br />
* The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the inserted object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
<T> Mono<T> insert(T objectToSave);

/**
* Insert the object into the specified collection.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
* <br />
* The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
* @return the inserted object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
<T> Mono<T> insert(T objectToSave, String collectionName);

@@ -1094,15 +1042,16 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {

/**
* Insert the object into the collection for the entity type of the object to save.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
* Type Conversion"</a> for more details.
* <br />
* <p/>
* <p/>
* Insert is used to initially store the object into the database. To update an existing object use the save method.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
@@ -1140,54 +1089,52 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
* object is not already present, that is an 'upsert'.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
* Type Conversion"</a> for more details.
* <br />
* The {@code objectToSave} must not be collection-like.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
<T> Mono<T> save(T objectToSave);

/**
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
* is an 'upsert'.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
* Conversion"</a> for more details.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
* @return the saved object.
* @throws IllegalArgumentException in case the {@code objectToSave} is collection-like.
*/
<T> Mono<T> save(T objectToSave, String collectionName);
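The surrounding Javadoc distinguishes `insert` (initial store only, rejects collection-like arguments) from `save` (an 'upsert'). A hedged sketch of that difference; the `Person` type and the `"people"` collection name are assumptions made for illustration:

```java
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Mono;

class InsertVsSave {

	// Illustrative mapped document, not part of this change set.
	static class Person {
		String id;
		String name;
	}

	Mono<Person> store(ReactiveMongoTemplate template, Person person) {
		// insert: initial store; inserting the same id twice fails with a duplicate key error
		return template.insert(person);
	}

	Mono<Person> upsert(ReactiveMongoTemplate template, Person person) {
		// save: inserts when the id is new, otherwise replaces the existing document
		return template.save(person, "people");
	}
}
```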

/**
* Save the object to the collection for the entity type of the object to save. This will perform an insert if the
* object is not already present, that is an 'upsert'.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation"> Spring's Type Conversion</a> for more details.
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See
* <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation" > Spring's
* Type Conversion"</a> for more details.
*
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @return the saved object.
@@ -1197,16 +1144,17 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
/**
* Save the object to the specified collection. This will perform an insert if the object is not already present, that
* is an 'upsert'.
* <br />
* <p/>
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a
* String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API.
* See <a href="https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type Conversion</a> for more details.
* property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See <a
* https://docs.spring.io/spring/docs/current/spring-framework-reference/core.html#validation">Spring's Type
* Conversion"</a> for more details.
*
* @param objectToSave the object to store in the collReactiveMongoOperationsection. Must not be {@literal null}.
* @param objectToSave the object to store in the collection. Must not be {@literal null}.
* @param collectionName name of the collection to store the object in. Must not be {@literal null}.
* @return the saved object.
*/
@@ -1478,10 +1426,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
* stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link Subscription#cancel() canceled}.
* <br />
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -1497,10 +1445,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite
* stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link Subscription#cancel() canceled}.
* <br />
* <p/>
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless
* configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* <br />
* <p/>
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more
* feature rich {@link Query}.
*
@@ -1517,10 +1465,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to
* filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link Subscription#cancel() canceled}.
* <br />
* <p />
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
* <br />
* <p />
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
* for resuming change streams.
*
@@ -1541,10 +1489,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter
* events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is
* {@link Subscription#cancel() canceled}.
* <br />
* <p />
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
* <br />
* <p />
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
* for resuming change streams.
*
@@ -1566,10 +1514,10 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations {
* Subscribe to a MongoDB <a href="https://docs.mongodb.com/manual/changeStreams/">Change Stream</a> via the reactive
* infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed
* unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
* <br />
* <p />
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the
* {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
* <br />
* <p />
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken}
* for resuming change streams.
*
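The three hunks above only touch the Javadoc of the `changeStream` methods. For context, the API they document can be exercised roughly like this; the collection name, the `Person` type, and the choice of the three-argument overload are assumptions for illustration only:

```java
import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import reactor.core.publisher.Flux;

class ChangeStreamExample {

	// Illustrative document type.
	static class Person {}

	Flux<Person> watchPeople(ReactiveMongoTemplate template) {
		// The Flux stays open until the Subscription is canceled; getRaw() would expose the raw payload.
		return template.changeStream("people", ChangeStreamOptions.empty(), Person.class)
				.map(ChangeStreamEvent::getBody);
	}
}
```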

@@ -17,20 +17,15 @@ package org.springframework.data.mongodb.core;

import static org.springframework.data.mongodb.core.query.SerializationUtils.*;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
@@ -60,7 +55,6 @@ import org.springframework.data.convert.EntityReader;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.GeoResult;
import org.springframework.data.geo.Metric;
import org.springframework.data.mapping.MappingException;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks;
import org.springframework.data.mapping.context.MappingContext;
@@ -70,7 +64,6 @@ import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils;
import org.springframework.data.mongodb.SessionSynchronization;
import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity;
import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition;
import org.springframework.data.mongodb.core.QueryOperations.CountContext;
import org.springframework.data.mongodb.core.QueryOperations.DeleteContext;
import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext;
@@ -80,7 +73,6 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext;
import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
import org.springframework.data.mongodb.core.convert.DbRefResolver;
@@ -110,7 +102,6 @@ import org.springframework.data.mongodb.core.query.NearQuery;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter;
import org.springframework.data.mongodb.core.timeseries.Granularity;
import org.springframework.data.mongodb.core.validation.Validator;
import org.springframework.data.mongodb.util.BsonUtils;
import org.springframework.data.projection.SpelAwareProxyProjectionFactory;
@@ -129,7 +120,16 @@ import com.mongodb.CursorType;
import com.mongodb.MongoException;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.client.model.*;
import com.mongodb.client.model.CountOptions;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.DeleteOptions;
import com.mongodb.client.model.FindOneAndDeleteOptions;
import com.mongodb.client.model.FindOneAndReplaceOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.ReturnDocument;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.ValidationOptions;
import com.mongodb.client.model.changestream.FullDocument;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.InsertOneResult;
@@ -158,7 +158,6 @@ import com.mongodb.reactivestreams.client.MongoDatabase;
* @author Christoph Strobl
* @author Roman Puchkovskiy
* @author Mathieu Ouellet
* @author Yadhukrishna S Pai
* @since 2.0
*/
public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware {
@@ -167,6 +166,18 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class);
private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE;
private static final Collection<Class<?>> ITERABLE_CLASSES;

static {

Set<Class<?>> iterableClasses = new HashSet<>();
iterableClasses.add(List.class);
iterableClasses.add(Collection.class);
iterableClasses.add(Iterator.class);
iterableClasses.add(Publisher.class);

ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses);
}

private final MongoConverter mongoConverter;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
@@ -362,7 +373,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* Set the {@link ReactiveEntityCallbacks} instance to use when invoking
* {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the
* {@link ReactiveBeforeSaveCallback}.
* <br />
* <p />
* Overrides potentially existing {@link ReactiveEntityCallbacks}.
*
* @param entityCallbacks must not be {@literal null}.
@@ -573,7 +584,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
ReactiveMongoTemplate.this);

return Flux.from(action.doInSession(operations)) //
.contextWrite(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session)));
.subscriberContext(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session)));
}
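The hunk above swaps the deprecated Reactor operator `subscriberContext(...)` for `contextWrite(...)`, its Reactor 3.4 replacement with identical semantics. A stand-alone sketch of the pattern, independent of the Mongo-specific `ReactiveMongoContext` helper (the key and value are made up):

```java
import reactor.core.publisher.Flux;
import reactor.util.context.Context;

class ContextWriteExample {

	Flux<String> tagged() {
		// deferContextual reads the subscription-time Context seen by upstream operators;
		// contextWrite (formerly subscriberContext) enriches it from downstream.
		return Flux.deferContextual(ctx -> Flux.just("value for " + ctx.get("sessionId")))
				.contextWrite(Context.of("sessionId", "42"));
	}
}
```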

/*
@@ -666,7 +677,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class)
*/
public <T> Mono<MongoCollection<Document>> createCollection(Class<T> entityClass) {
return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions());
return createCollection(entityClass, CollectionOptions.empty());
}

/*
@@ -945,7 +956,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

Assert.notNull(aggregation, "Aggregation pipeline must not be null!");

return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType);
AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(),
mappingContext, queryMapper);
return aggregate(aggregation, inputCollectionName, outputType, context);
}

/*
@@ -963,7 +976,9 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <O> Flux<O> aggregate(Aggregation aggregation, Class<?> inputType, Class<O> outputType) {
return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType);

return aggregate(aggregation, getCollectionName(inputType), outputType,
new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper));
}

/*
@@ -972,34 +987,45 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*/
@Override
public <O> Flux<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType) {
return doAggregate(aggregation, collectionName, null, outputType);
return aggregate(aggregation, collectionName, outputType, null);
}

protected <O> Flux<O> doAggregate(Aggregation aggregation, String collectionName, @Nullable Class<?> inputType,
Class<O> outputType) {
/**
* @param aggregation must not be {@literal null}.
* @param collectionName must not be {@literal null}.
* @param outputType must not be {@literal null}.
* @param context can be {@literal null} and will be defaulted to {@link Aggregation#DEFAULT_CONTEXT}.
* @return never {@literal null}.
*/
protected <O> Flux<O> aggregate(Aggregation aggregation, String collectionName, Class<O> outputType,
@Nullable AggregationOperationContext context) {

Assert.notNull(aggregation, "Aggregation pipeline must not be null!");
Assert.hasText(collectionName, "Collection name must not be null or empty!");
Assert.notNull(outputType, "Output type must not be null!");

AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext);
AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context);

AggregationOptions options = aggregation.getOptions();
List<Document> pipeline = aggregationUtil.createPipeline(aggregation, rootContext);

Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!");

AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType);

if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()),
collectionName);
LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName);
}

ReadDocumentCallback<O> readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName);
return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(),
ctx.isOutOrMerge(), options, readCallback, ctx.getInputType()));
return execute(collectionName,
collection -> aggregateAndMap(collection, pipeline, aggregation.getPipeline().isOutOrMerge(), options,
readCallback,
aggregation instanceof TypedAggregation ? ((TypedAggregation<?>) aggregation).getInputType() : null));
}

private <O> Flux<O> aggregateAndMap(MongoCollection<Document> collection, List<Document> pipeline,
boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback<O> readCallback,
@Nullable Class<?> inputType) {
boolean isOutOrMerge,
AggregationOptions options, ReadDocumentCallback<O> readCallback, @Nullable Class<?> inputType) {

AggregatePublisher<Document> cursor = collection.aggregate(pipeline, Document.class)
.allowDiskUse(options.isAllowDiskUse());
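The refactored `aggregate(...)`/`doAggregate(...)` above ultimately streams the pipeline through `aggregateAndMap`. From the caller's side the entry point shown in this diff is unchanged; a hedged usage sketch (collection name, field names, and the output type are illustrative assumptions):

```java
import static org.springframework.data.mongodb.core.aggregation.Aggregation.*;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.bson.Document;
import org.springframework.data.mongodb.core.ReactiveMongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import reactor.core.publisher.Flux;

class AggregationExample {

	Flux<Document> totalsByCustomer(ReactiveMongoTemplate template) {
		Aggregation aggregation = newAggregation(
				match(where("status").is("OPEN")),
				group("customerId").sum("amount").as("total"));

		// Streaming aggregation: explain mode and cursor batch size are rejected here,
		// as the Javadoc earlier in this diff points out.
		return template.aggregate(aggregation, "orders", Document.class);
	}
}
```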
@@ -1009,7 +1035,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}

options.getComment().ifPresent(cursor::comment);
options.getHint().ifPresent(cursor::hint);

Optionals.firstNonEmpty(options::getCollation, () -> operations.forType(inputType).getCollation()) //
.map(Collation::toMongoCollation) //
@@ -1233,15 +1258,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
});
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String)
*/
@Override
public Mono<Long> estimatedCount(String collectionName) {
return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions());
}

/**
* Run the actual count operation against the collection with given name.
*
@@ -1256,11 +1272,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options));
}

protected Mono<Long> doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) {

return createMono(collectionName, collection -> collection.estimatedDocumentCount(options));
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono)
@@ -2097,7 +2108,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
AggregationOperationContext context = agg instanceof TypedAggregation
? new TypeBasedAggregationOperationContext(((TypedAggregation<?>) agg).getInputType(),
getConverter().getMappingContext(), queryMapper)
: new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper);
: Aggregation.DEFAULT_CONTEXT;

return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument",
Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns")));
@@ -2407,8 +2418,8 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @param query the query document that specifies the criteria used to find a record.
* @param fields the document that specifies the fields to be returned.
* @param entityClass the parameterized type of the returned list.
* @param preparer allows for customization of the {@link com.mongodb.client.FindIterable} used when iterating over
* the result set, (apply limits, skips and so on).
* @param preparer allows for customization of the {@link com.mongodb.client.FindIterable} used when iterating over the result set, (apply
* limits, skips and so on).
* @return the {@link List} of converted objects.
*/
protected <T> Flux<T> doFind(String collectionName, Document query, Document fields, Class<T> entityClass,
@@ -2506,20 +2517,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
result.validationOptions(validationOptions);
});

collectionOptions.getTimeSeriesOptions().map(operations.forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> {

TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField());

if (StringUtils.hasText(it.getMetaField())) {
options.metaField(it.getMetaField());
}
if (!Granularity.DEFAULT.equals(it.getGranularity())) {
options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase()));
}

result.timeSeriesOptions(options);
});

return result;
}

@@ -2537,7 +2534,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
/**
* Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter.
* The first document that matches the query is returned and also removed from the collection in the database.
* <br />
* <p/>
* The query document is specified as a standard Document and so is the fields specification.
*
* @param collectionName name of the collection to retrieve the objects from
@@ -2680,27 +2677,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}
}

/**
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
* {@link Iterator}.
*
* @param source can be {@literal null}.
* @deprecated since 3.2. Call {@link #ensureNotCollectionLike(Object)} instead.
*/
protected void ensureNotIterable(@Nullable Object source) {
ensureNotCollectionLike(source);
}
protected void ensureNotIterable(Object o) {

/**
* Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or
* {@link Iterator}.
*
* @param source can be {@literal null}.
* @since 3.2.
*/
protected void ensureNotCollectionLike(@Nullable Object source) {
boolean isIterable = o.getClass().isArray()
|| ITERABLE_CLASSES.stream().anyMatch(iterableClass -> iterableClass.isAssignableFrom(o.getClass())
|| o.getClass().getName().equals(iterableClass.getName()));

if (EntityOperations.isCollectionLike(source) || source instanceof Publisher) {
if (isIterable) {
throw new IllegalArgumentException("Cannot use a collection here.");
}
}
@@ -2742,14 +2725,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
return potentiallyForceAcknowledgedWrite(wc);
}

/**
* @return the {@link MongoDatabaseFactory} in use.
* @since 3.1.4
*/
public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() {
return mongoDatabaseFactory;
}

@Nullable
private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) {

@@ -2918,6 +2893,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
*
* @author Mark Paluch
*/
@RequiredArgsConstructor
private static class FindCallback implements ReactiveCollectionQueryCallback<Document> {

private final @Nullable Document query;
@@ -2927,12 +2903,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
this(query, null);
}

FindCallback(Document query, Document fields) {

this.query = query;
this.fields = fields;
}

@Override
public FindPublisher<Document> doInCollection(MongoCollection<Document> collection) {

@@ -2986,6 +2956,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
/**
* @author Mark Paluch
*/
@RequiredArgsConstructor
private static class FindAndModifyCallback implements ReactiveCollectionCallback<Document> {

private final Document query;
@@ -2995,17 +2966,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
private final List<Document> arrayFilters;
private final FindAndModifyOptions options;

FindAndModifyCallback(Document query, Document fields, Document sort, Object update, List<Document> arrayFilters,
FindAndModifyOptions options) {

this.query = query;
this.fields = fields;
this.sort = sort;
this.update = update;
this.arrayFilters = arrayFilters;
this.options = options;
}

@Override
public Publisher<Document> doInCollection(MongoCollection<Document> collection)
throws MongoException, DataAccessException {
@@ -3061,6 +3021,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @author Christoph Strobl
* @since 2.1
*/
@RequiredArgsConstructor(access = AccessLevel.PACKAGE)
private static class FindAndReplaceCallback implements ReactiveCollectionCallback<Document> {

private final Document query;
@@ -3070,17 +3031,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
private final @Nullable com.mongodb.client.model.Collation collation;
private final FindAndReplaceOptions options;

FindAndReplaceCallback(Document query, Document fields, Document sort, Document update,
com.mongodb.client.model.Collation collation, FindAndReplaceOptions options) {

this.query = query;
this.fields = fields;
this.sort = sort;
this.update = update;
this.collation = collation;
this.options = options;
}

/*
* (non-Javadoc)
* @see org.springframework.data.mongodb.core.ReactiveCollectionCallback#doInCollection(com.mongodb.reactivestreams.client.MongoCollection)
@@ -3177,14 +3127,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName));

T entity = reader.read(type, document);

if (entity == null) {
throw new MappingException(String.format("EntityReader %s returned null", reader));
T source = reader.read(type, document);
if (source != null) {
maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName));
return maybeCallAfterConvert(source, document, collectionName);
}

maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName));
return maybeCallAfterConvert(entity, document, collectionName);
return Mono.empty();
}
}

@@ -3198,20 +3147,13 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
* @author Roman Puchkovskiy
* @since 2.0
*/
@RequiredArgsConstructor
private class ProjectingReadCallback<S, T> implements DocumentCallback<T> {

private final EntityReader<Object, Bson> reader;
private final Class<S> entityType;
private final Class<T> targetType;
private final String collectionName;

ProjectingReadCallback(EntityReader<Object, Bson> reader, Class<S> entityType, Class<T> targetType,
String collectionName) {
this.reader = reader;
this.entityType = entityType;
this.targetType = targetType;
this.collectionName = collectionName;
}
private final @NonNull EntityReader<Object, Bson> reader;
private final @NonNull Class<S> entityType;
private final @NonNull Class<T> targetType;
private final @NonNull String collectionName;

@SuppressWarnings("unchecked")
public Mono<T> doWith(Document document) {
@@ -3222,17 +3164,16 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati

maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName));

Object entity = reader.read(typeToRead, document);

if (entity == null) {
throw new MappingException(String.format("EntityReader %s returned null", reader));
}

Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity;
Object source = reader.read(typeToRead, document);
Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source;

T castEntity = (T) result;
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
return maybeCallAfterConvert(castEntity, document, collectionName);
if (castEntity != null) {
maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName));
return maybeCallAfterConvert(castEntity, document, collectionName);
}

return Mono.empty();
}
}

@@ -3351,10 +3292,6 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
if (meta.getCursorBatchSize() != null) {
findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize());
}

if (meta.getAllowDiskUse() != null) {
findPublisherToUse = findPublisherToUse.allowDiskUse(meta.getAllowDiskUse());
}
}

} catch (RuntimeException e) {
@@ -3390,7 +3327,7 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
/**
* {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the
* server through the driver API.
* <br />
* <p />
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired
* target method matching the actual arguments plus a {@link ClientSession}.
*
@@ -3437,14 +3374,11 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati
}
}

@RequiredArgsConstructor
class IndexCreatorEventListener implements ApplicationListener<MappingContextEvent<?, ?>> {

final Consumer<Throwable> subscriptionExceptionHandler;

public IndexCreatorEventListener(Consumer<Throwable> subscriptionExceptionHandler) {
this.subscriptionExceptionHandler = subscriptionExceptionHandler;
}

@Override
public void onApplicationEvent(MappingContextEvent<?, ?> event) {


@@ -15,6 +15,10 @@
*/
package org.springframework.data.mongodb.core;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@@ -31,15 +35,12 @@ import com.mongodb.client.result.DeleteResult;
* @author Christoph Strobl
* @since 2.0
*/
@RequiredArgsConstructor
class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation {

private static final Query ALL_QUERY = new Query();

private final ReactiveMongoTemplate tempate;

ReactiveRemoveOperationSupport(ReactiveMongoTemplate tempate) {
this.tempate = tempate;
}
private final @NonNull ReactiveMongoTemplate tempate;

/*
* (non-Javadoc)
@@ -53,20 +54,14 @@ class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation {
return new ReactiveRemoveSupport<>(tempate, domainType, ALL_QUERY, null);
}

@RequiredArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
static class ReactiveRemoveSupport<T> implements ReactiveRemove<T>, RemoveWithCollection<T> {

private final ReactiveMongoTemplate template;
private final Class<T> domainType;
private final Query query;
private final String collection;

ReactiveRemoveSupport(ReactiveMongoTemplate template, Class<T> domainType, Query query, String collection) {

this.template = template;
this.domainType = domainType;
this.query = query;
this.collection = collection;
}
@NonNull ReactiveMongoTemplate template;
@NonNull Class<T> domainType;
Query query;
String collection;

/*
* (non-Javadoc)
@@ -32,7 +32,7 @@ public interface ReactiveSessionCallback<T> {
/**
* Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is
* inferred directly into the operation so that no further interaction is necessary.
* <br />
* <p />
* Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and
* others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway
* objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or

@@ -33,7 +33,7 @@ public interface ReactiveSessionScoped {

/**
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
* <br />
* <p/>
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
* closed} when done.
*
@@ -47,7 +47,7 @@ public interface ReactiveSessionScoped {

/**
* Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}.
* <br />
* <p/>
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
* closed} when done.
*

@@ -15,10 +15,13 @@
*/
package org.springframework.data.mongodb.core;

import lombok.AccessLevel;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.experimental.FieldDefaults;
import reactor.core.publisher.Mono;

import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.UpdateDefinition;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
@@ -32,15 +35,12 @@ import com.mongodb.client.result.UpdateResult;
* @author Christoph Strobl
* @since 2.0
*/
@RequiredArgsConstructor
class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {

private static final Query ALL_QUERY = new Query();

private final ReactiveMongoTemplate template;

ReactiveUpdateOperationSupport(ReactiveMongoTemplate template) {
this.template = template;
}
private final @NonNull ReactiveMongoTemplate template;

/*
* (non-Javadoc)
@@ -54,34 +54,21 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {
return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType);
}

@RequiredArgsConstructor
@FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true)
static class ReactiveUpdateSupport<T>
implements ReactiveUpdate<T>, UpdateWithCollection<T>, UpdateWithQuery<T>, TerminatingUpdate<T>,
FindAndReplaceWithOptions<T>, FindAndReplaceWithProjection<T>, TerminatingFindAndReplace<T> {

private final ReactiveMongoTemplate template;
private final Class<?> domainType;
private final Query query;
private final org.springframework.data.mongodb.core.query.UpdateDefinition update;
@Nullable private final String collection;
@Nullable private final FindAndModifyOptions findAndModifyOptions;
@Nullable private final FindAndReplaceOptions findAndReplaceOptions;
@Nullable private final Object replacement;
private final Class<T> targetType;

ReactiveUpdateSupport(ReactiveMongoTemplate template, Class<?> domainType, Query query, UpdateDefinition update,
String collection, FindAndModifyOptions findAndModifyOptions, FindAndReplaceOptions findAndReplaceOptions,
Object replacement, Class<T> targetType) {

this.template = template;
this.domainType = domainType;
this.query = query;
this.update = update;
this.collection = collection;
this.findAndModifyOptions = findAndModifyOptions;
this.findAndReplaceOptions = findAndReplaceOptions;
this.replacement = replacement;
this.targetType = targetType;
}
@NonNull ReactiveMongoTemplate template;
@NonNull Class<?> domainType;
Query query;
org.springframework.data.mongodb.core.query.UpdateDefinition update;
@Nullable String collection;
@Nullable FindAndModifyOptions findAndModifyOptions;
@Nullable FindAndReplaceOptions findAndReplaceOptions;
@Nullable Object replacement;
@NonNull Class<T> targetType;

/*
* (non-Javadoc)
@@ -136,9 +123,7 @@ class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation {

String collectionName = getCollectionName();

return template.findAndModify(query, update,
findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType,
collectionName);
return template.findAndModify(query, update, findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), targetType, collectionName);
}
|
||||
|
||||
/*
|
||||
|
||||
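The fluent reactive update API shown above terminates in methods such as findAndModify(). A minimal usage sketch, assuming an existing ReactiveMongoTemplate named template and a hypothetical Jedi document class (none of these names are part of this change set):

```java
import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.query.Update;

import reactor.core.publisher.Mono;

// Atomically apply the update to the first matching document and emit the modified entity.
Mono<Jedi> updated = template.update(Jedi.class)
        .matching(query(where("name").is("luke")))
        .apply(new Update().set("side", "light"))
        .findAndModify();
```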
@@ -23,7 +23,7 @@ import org.springframework.lang.Nullable;
|
||||
|
||||
|
||||
/**
|
||||
* Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions.
|
||||
* Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Oliver Gierke
|
||||
@@ -72,10 +72,10 @@ public interface ScriptOperations {
|
||||
Object call(String scriptName, Object... args);
|
||||
|
||||
/**
|
||||
* Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name.
|
||||
* Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name.
|
||||
*
|
||||
* @param scriptName must not be {@literal null} or empty.
|
||||
* @return false if no {@literal ServerSideJavaScript} with given name exists.
|
||||
* @return false if no {@link ServerSideJavaScript} with given name exists.
|
||||
*/
|
||||
boolean exists(String scriptName);
|
||||
|
||||
|
||||
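A short sketch of how the ScriptOperations contract above is typically obtained and used, assuming an existing MongoOperations named template; the script name and argument are illustrative:

```java
import org.springframework.data.mongodb.core.ScriptOperations;

// ScriptOperations are obtained from the template and operate on database level.
ScriptOperations scripts = template.scriptOps();

boolean present = scripts.exists("echo");      // exists(String) as declared above
Object result = scripts.call("echo", "hello"); // call(String, Object...) as declared above
```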
@@ -31,7 +31,7 @@ public interface SessionCallback<T> {
|
||||
/**
|
||||
* Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred
|
||||
* directly into the operation so that no further interaction is necessary.
|
||||
* <br />
|
||||
* <p />
|
||||
* Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others
|
||||
* are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like
|
||||
* {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg.
|
||||
|
||||
@@ -23,7 +23,7 @@ import com.mongodb.client.ClientSession;
|
||||
|
||||
/**
|
||||
* Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}.
|
||||
* <br />
|
||||
* <p />
|
||||
* The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
@@ -34,7 +34,7 @@ public interface SessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
@@ -49,7 +49,7 @@ public interface SessionScoped {
|
||||
|
||||
/**
|
||||
* Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close()
|
||||
* closed} when done.
|
||||
*
|
||||
|
||||
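As a usage sketch for the SessionCallback/SessionScoped pair above: obtaining a SessionScoped via withSession(...) binds every callback invocation to the same ClientSession. The Person type and the session options are illustrative, and an existing MongoOperations named template is assumed:

```java
import org.springframework.data.mongodb.core.query.Query;

import com.mongodb.ClientSessionOptions;

// All Spring Data operations issued inside the callback run on the same ClientSession.
long count = template
        .withSession(ClientSessionOptions.builder().causallyConsistent(true).build())
        .execute(operations -> operations.count(new Query(), Person.class));
```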
@@ -15,6 +15,7 @@
|
||||
*/
|
||||
package org.springframework.data.mongodb.core;
|
||||
|
||||
import lombok.Value;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import org.bson.codecs.configuration.CodecRegistry;
|
||||
@@ -26,7 +27,6 @@ import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
|
||||
import org.springframework.data.mongodb.SessionAwareMethodInterceptor;
|
||||
import org.springframework.lang.Nullable;
|
||||
import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
import com.mongodb.ClientSessionOptions;
|
||||
import com.mongodb.ConnectionString;
|
||||
@@ -175,16 +175,11 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
* @author Christoph Strobl
|
||||
* @since 2.1
|
||||
*/
|
||||
static final class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory {
|
||||
@Value
|
||||
static class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory {
|
||||
|
||||
private final ClientSession session;
|
||||
private final ReactiveMongoDatabaseFactory delegate;
|
||||
|
||||
ClientSessionBoundMongoDbFactory(ClientSession session, ReactiveMongoDatabaseFactory delegate) {
|
||||
|
||||
this.session = session;
|
||||
this.delegate = delegate;
|
||||
}
|
||||
ClientSession session;
|
||||
ReactiveMongoDatabaseFactory delegate;
|
||||
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
@@ -273,40 +268,5 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React
|
||||
|
||||
return targetType.cast(factory.getProxy(target.getClass().getClassLoader()));
|
||||
}
|
||||
|
||||
public ClientSession getSession() {
|
||||
return this.session;
|
||||
}
|
||||
|
||||
public ReactiveMongoDatabaseFactory getDelegate() {
|
||||
return this.delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o)
|
||||
return true;
|
||||
if (o == null || getClass() != o.getClass())
|
||||
return false;
|
||||
|
||||
ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o;
|
||||
|
||||
if (!ObjectUtils.nullSafeEquals(this.session, that.session)) {
|
||||
return false;
|
||||
}
|
||||
return ObjectUtils.nullSafeEquals(this.delegate, that.delegate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = ObjectUtils.nullSafeHashCode(this.session);
|
||||
result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "SimpleReactiveMongoDatabaseFactory.ClientSessionBoundMongoDbFactory(session=" + this.getSession()
|
||||
+ ", delegate=" + this.getDelegate() + ")";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,11 +29,8 @@ import org.springframework.util.Assert;
|
||||
import org.springframework.util.ObjectUtils;
|
||||
|
||||
/**
|
||||
* Support class for {@link AggregationExpression} implementations.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Matt Morrissette
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
*/
|
||||
abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
@@ -52,6 +49,7 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return toDocument(this.value, context);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public Document toDocument(Object value, AggregationOperationContext context) {
|
||||
return new Document(getMongoMethod(), unpack(value, context));
|
||||
}
|
||||
@@ -103,19 +101,17 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return value;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected List<Object> append(Object value, Expand expandList) {
|
||||
|
||||
if (this.value instanceof List) {
|
||||
|
||||
List<Object> clone = new ArrayList<>((List<Object>) this.value);
|
||||
List<Object> clone = new ArrayList<Object>((List) this.value);
|
||||
|
||||
if (value instanceof Collection && Expand.EXPAND_VALUES.equals(expandList)) {
|
||||
clone.addAll((Collection<?>) value);
|
||||
} else {
|
||||
clone.add(value);
|
||||
}
|
||||
|
||||
return clone;
|
||||
}
|
||||
|
||||
@@ -133,72 +129,25 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return append(value, Expand.EXPAND_VALUES);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
protected Map<String, Object> append(String key, Object value) {
|
||||
@SuppressWarnings("unchecked")
|
||||
protected java.util.Map<String, Object> append(String key, Object value) {
|
||||
|
||||
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!");
|
||||
|
||||
Map<String, Object> clone = new LinkedHashMap<>((java.util.Map) this.value);
|
||||
java.util.Map<String, Object> clone = new LinkedHashMap<>((java.util.Map) this.value);
|
||||
clone.put(key, value);
|
||||
return clone;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "unchecked", "rawtypes" })
|
||||
protected Map<String, Object> remove(String key) {
|
||||
|
||||
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!");
|
||||
|
||||
Map<String, Object> clone = new LinkedHashMap<>((java.util.Map) this.value);
|
||||
clone.remove(key);
|
||||
return clone;
|
||||
}
|
||||
|
||||
/**
|
||||
* Append the given key at the given position in the underlying {@link LinkedHashMap}.
|
||||
*
|
||||
* @param index
|
||||
* @param key
|
||||
* @param value
|
||||
* @return
|
||||
* @since 3.1
|
||||
*/
|
||||
@SuppressWarnings({ "unchecked" })
|
||||
protected Map<String, Object> appendAt(int index, String key, Object value) {
|
||||
|
||||
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!");
|
||||
|
||||
Map<String, Object> clone = new LinkedHashMap<>();
|
||||
|
||||
int i = 0;
|
||||
for (Map.Entry<String, Object> entry : ((Map<String, Object>) this.value).entrySet()) {
|
||||
|
||||
if (i == index) {
|
||||
clone.put(key, value);
|
||||
}
|
||||
if (!entry.getKey().equals(key)) {
|
||||
clone.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
i++;
|
||||
}
|
||||
if (i <= index) {
|
||||
clone.put(key, value);
|
||||
}
|
||||
return clone;
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "rawtypes" })
|
||||
protected List<Object> values() {
|
||||
|
||||
if (value instanceof List) {
|
||||
return new ArrayList<Object>((List) value);
|
||||
}
|
||||
|
||||
if (value instanceof java.util.Map) {
|
||||
return new ArrayList<Object>(((java.util.Map) value).values());
|
||||
}
|
||||
|
||||
return new ArrayList<>(Collections.singletonList(value));
|
||||
}
|
||||
|
||||
@@ -228,7 +177,7 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
|
||||
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!");
|
||||
|
||||
return (T) ((Map<String, Object>) this.value).get(key);
|
||||
return (T) ((java.util.Map<String, Object>) this.value).get(key);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -238,11 +187,11 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
* @return
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
protected Map<String, Object> argumentMap() {
|
||||
protected java.util.Map<String, Object> argumentMap() {
|
||||
|
||||
Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!");
|
||||
|
||||
return Collections.unmodifiableMap((java.util.Map<String, Object>) value);
|
||||
return Collections.unmodifiableMap((java.util.Map) value);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -259,7 +208,7 @@ abstract class AbstractAggregationExpression implements AggregationExpression {
|
||||
return false;
|
||||
}
|
||||
|
||||
return ((Map<String, Object>) this.value).containsKey(key);
|
||||
return ((java.util.Map<String, Object>) this.value).containsKey(key);
|
||||
}
|
||||
|
||||
protected abstract String getMongoMethod();
|
||||
|
||||
@@ -142,118 +142,11 @@ public class AccumulatorOperators {
|
||||
return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the population covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(String fieldReference) {
|
||||
return covariancePop().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the population covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovariancePop covariancePop(AggregationExpression expression) {
|
||||
return covariancePop().and(expression);
|
||||
}
|
||||
|
||||
private CovariancePop covariancePop() {
|
||||
return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the
|
||||
* given field to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(String fieldReference) {
|
||||
return covarianceSamp().and(fieldReference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the
|
||||
* given {@link AggregationExpression expression} to calculate the sample covariance of the two.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public CovarianceSamp covarianceSamp(AggregationExpression expression) {
|
||||
return covarianceSamp().and(expression);
|
||||
}
|
||||
|
||||
private CovarianceSamp covarianceSamp() {
|
||||
return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference)
|
||||
: CovarianceSamp.covarianceSampOf(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link ExpMovingAvgBuilder} to build an {@link AggregationExpression expMovingAvg} that calculates
|
||||
* the exponential moving average of numeric values
|
||||
*
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public ExpMovingAvgBuilder expMovingAvg() {
|
||||
|
||||
ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference)
|
||||
: ExpMovingAvg.expMovingAvgOf(expression);
|
||||
return new ExpMovingAvgBuilder() {
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) {
|
||||
return expMovingAvg.n(numberOfHistoricalDocuments);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return expMovingAvg.alpha(exponentialDecayValue);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private boolean usesFieldRef() {
|
||||
return fieldReference != null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builder for {@link ExpMovingAvg}.
|
||||
*
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface ExpMovingAvgBuilder {
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight.
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments);
|
||||
|
||||
/**
|
||||
* Define the exponential decay value.
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
ExpMovingAvg alpha(double exponentialDecayValue);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $sum}.
|
||||
*
|
||||
@@ -765,185 +658,4 @@ public class AccumulatorOperators {
|
||||
return super.toDocument(value, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covariancePop}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovariancePop extends AbstractAggregationExpression {
|
||||
|
||||
private CovariancePop(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovariancePop(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public static CovariancePop covariancePopOf(AggregationExpression expression) {
|
||||
return new CovariancePop(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(String fieldReference) {
|
||||
return new CovariancePop(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovariancePop} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovariancePop}.
|
||||
*/
|
||||
public CovariancePop and(AggregationExpression expression) {
|
||||
return new CovariancePop(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covariancePop";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $covarianceSamp}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class CovarianceSamp extends AbstractAggregationExpression {
|
||||
|
||||
private CovarianceSamp(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new CovarianceSamp(asFields(fieldReference));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public static CovarianceSamp covarianceSampOf(AggregationExpression expression) {
|
||||
return new CovarianceSamp(Collections.singletonList(expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(String fieldReference) {
|
||||
return new CovarianceSamp(append(asFields(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates new {@link CovarianceSamp} with all previously added arguments appending the given one.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link CovarianceSamp}.
|
||||
*/
|
||||
public CovarianceSamp and(AggregationExpression expression) {
|
||||
return new CovarianceSamp(append(expression));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$covarianceSamp";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link ExpMovingAvg} calculates the exponential moving average of numeric values.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class ExpMovingAvg extends AbstractAggregationExpression {
|
||||
|
||||
private ExpMovingAvg(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input.
|
||||
*
|
||||
* @param fieldReference must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(String fieldReference) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value
|
||||
* to be used as input.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) {
|
||||
return new ExpMovingAvg(Collections.singletonMap("input", expression));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the number of historical documents with significant mathematical weight. <br />
|
||||
* Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both!
|
||||
*
|
||||
* @param numberOfHistoricalDocuments
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) {
|
||||
return new ExpMovingAvg(append("N", numberOfHistoricalDocuments));
|
||||
}
|
||||
|
||||
/**
|
||||
* Define the exponential decay value. <br />
|
||||
* Specify either {@link #alpha(double) alpha} or {@link #n(int) N}. Not both!
|
||||
*
|
||||
* @param exponentialDecayValue
|
||||
* @return new instance of {@link ExpMovingAvg}.
|
||||
*/
|
||||
public ExpMovingAvg alpha(double exponentialDecayValue) {
|
||||
return new ExpMovingAvg(append("alpha", exponentialDecayValue));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$expMovingAvg";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
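A brief sketch of the new accumulator operators introduced above, entered through AccumulatorOperators.valueOf(...); the field names are illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators;
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;

// { $covariancePop: [ "$price", "$quantity" ] }
AggregationExpression covariance = AccumulatorOperators.valueOf("price").covariancePop("quantity");

// { $expMovingAvg: { input: "$price", alpha: 0.75 } }
AggregationExpression trend = AccumulatorOperators.valueOf("price").expMovingAvg().alpha(0.75);
```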
@@ -201,5 +201,4 @@ public class AddFieldsOperation extends DocumentEnhancingOperation {
|
||||
AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -227,7 +227,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is
|
||||
* an alias for {@code $addFields}.
|
||||
*
|
||||
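A minimal sketch of the $addFields builder referenced above; the field name and value are illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation;
import org.springframework.data.mongodb.core.aggregation.Aggregation;

// { $addFields: { discounted: true } }
AddFieldsOperation addFields = Aggregation.addFields()
        .addFieldWithValue("discounted", true)
        .build();
```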
@@ -499,17 +499,6 @@ public class Aggregation {
|
||||
return new MatchOperation(criteria);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link MatchOperation} using the given {@link AggregationExpression}.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link MatchOperation}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public static MatchOperation match(AggregationExpression expression) {
|
||||
return new MatchOperation(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The
|
||||
* {@code distanceField} defines output field that contains the calculated distance.
|
||||
@@ -726,7 +715,7 @@ public class Aggregation {
|
||||
|
||||
/**
|
||||
* Converts this {@link Aggregation} specification to a {@link Document}.
|
||||
* <br />
|
||||
* <p/>
|
||||
* MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render
|
||||
* an aggregation pipeline.
|
||||
*
|
||||
|
||||
@@ -16,7 +16,6 @@
|
||||
package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.data.mongodb.MongoExpression;
|
||||
|
||||
/**
|
||||
* An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like
|
||||
@@ -26,37 +25,7 @@ import org.springframework.data.mongodb.MongoExpression;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
*/
|
||||
public interface AggregationExpression extends MongoExpression {
|
||||
|
||||
/**
|
||||
* Create an {@link AggregationExpression} out of a given {@link MongoExpression} to ensure the resulting
|
||||
* {@link MongoExpression#toDocument() Document} is mapped against the {@link AggregationOperationContext}. <br />
|
||||
* If the given expression is already an {@link AggregationExpression} the very same instance is returned.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
* @since 3.2
|
||||
*/
|
||||
static AggregationExpression from(MongoExpression expression) {
|
||||
|
||||
if (expression instanceof AggregationExpression) {
|
||||
return AggregationExpression.class.cast(expression);
|
||||
}
|
||||
|
||||
return (context) -> context.getMappedObject(expression.toDocument());
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the as is (unmapped) representation of the {@link AggregationExpression}. Use
|
||||
* {@link #toDocument(AggregationOperationContext)} with a matching {@link AggregationOperationContext context} to
|
||||
* engage domain type mapping including field name resolution.
|
||||
*
|
||||
* @see org.springframework.data.mongodb.MongoExpression#toDocument()
|
||||
*/
|
||||
@Override
|
||||
default Document toDocument() {
|
||||
return toDocument(Aggregation.DEFAULT_CONTEXT);
|
||||
}
|
||||
public interface AggregationExpression {
|
||||
|
||||
/**
|
||||
* Turns the {@link AggregationExpression} into a {@link Document} within the given
|
||||
|
||||
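Combining the two additions above, Aggregation.match(AggregationExpression) and AggregationExpression.from(MongoExpression), a match stage can be driven by a plain expression. This is a sketch only; the JSON expression is illustrative and it assumes MongoExpression.create(String) as the entry point:

```java
import org.springframework.data.mongodb.MongoExpression;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.MatchOperation;

// Wrap a raw expression so field names are mapped against the domain type when the
// pipeline is rendered, then use it as the $match condition.
MatchOperation match = Aggregation.match(
        AggregationExpression.from(MongoExpression.create("{ $gt: [ '$qty', 100 ] }")));
```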
@@ -32,7 +32,6 @@ import org.springframework.util.Assert;
|
||||
* @author Oliver Gierke
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @author Yadhukrishna S Pai
|
||||
* @see Aggregation#withOptions(AggregationOptions)
|
||||
* @see TypedAggregation#withOptions(AggregationOptions)
|
||||
* @since 1.6
|
||||
@@ -46,17 +45,14 @@ public class AggregationOptions {
|
||||
private static final String COLLATION = "collation";
|
||||
private static final String COMMENT = "comment";
|
||||
private static final String MAX_TIME = "maxTimeMS";
|
||||
private static final String HINT = "hint";
|
||||
|
||||
private final boolean allowDiskUse;
|
||||
private final boolean explain;
|
||||
private final Optional<Document> cursor;
|
||||
private final Optional<Collation> collation;
|
||||
private final Optional<String> comment;
|
||||
private final Optional<Document> hint;
|
||||
private Duration maxTime = Duration.ZERO;
|
||||
private ResultOptions resultOptions = ResultOptions.READ;
|
||||
private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED;
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
@@ -75,13 +71,13 @@ public class AggregationOptions {
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the
|
||||
* aggregation.
|
||||
* aggregation.
|
||||
* @param collation collation for string comparison. Can be {@literal null}.
|
||||
* @since 2.0
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation) {
|
||||
this(allowDiskUse, explain, cursor, collation, null, null);
|
||||
this(allowDiskUse, explain, cursor, collation, null);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -90,37 +86,19 @@ public class AggregationOptions {
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the
|
||||
* aggregation.
|
||||
* aggregation.
|
||||
* @param collation collation for string comparison. Can be {@literal null}.
|
||||
* @param comment execution comment. Can be {@literal null}.
|
||||
* @since 2.2
|
||||
*/
|
||||
public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation, @Nullable String comment) {
|
||||
this(allowDiskUse, explain, cursor, collation, comment, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link AggregationOptions}.
|
||||
*
|
||||
* @param allowDiskUse whether to off-load intensive sort-operations to disk.
|
||||
* @param explain whether to get the execution plan for the aggregation instead of the actual results.
|
||||
* @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the
|
||||
* aggregation.
|
||||
* @param collation collation for string comparison. Can be {@literal null}.
|
||||
* @param comment execution comment. Can be {@literal null}.
|
||||
* @param hint can be {@literal null}, used to provide an index that would be forcibly used by query optimizer.
|
||||
* @since 3.1
|
||||
*/
|
||||
private AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor,
|
||||
@Nullable Collation collation, @Nullable String comment, @Nullable Document hint) {
|
||||
|
||||
this.allowDiskUse = allowDiskUse;
|
||||
this.explain = explain;
|
||||
this.cursor = Optional.ofNullable(cursor);
|
||||
this.collation = Optional.ofNullable(collation);
|
||||
this.comment = Optional.ofNullable(comment);
|
||||
this.hint = Optional.ofNullable(hint);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -152,9 +130,8 @@ public class AggregationOptions {
|
||||
Collation collation = document.containsKey(COLLATION) ? Collation.from(document.get(COLLATION, Document.class))
|
||||
: null;
|
||||
String comment = document.getString(COMMENT);
|
||||
Document hint = document.get(HINT, Document.class);
|
||||
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint);
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
if (document.containsKey(MAX_TIME)) {
|
||||
options.maxTime = Duration.ofMillis(document.getLong(MAX_TIME));
|
||||
}
|
||||
@@ -235,16 +212,6 @@ public class AggregationOptions {
|
||||
return comment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the hint used to fulfill the aggregation.
|
||||
*
|
||||
* @return never {@literal null}.
|
||||
* @since 3.1
|
||||
*/
|
||||
public Optional<Document> getHint() {
|
||||
return hint;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior.
|
||||
* @since 3.0
|
||||
@@ -262,14 +229,6 @@ public class AggregationOptions {
|
||||
return ResultOptions.SKIP.equals(resultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the domain type mapping strategy to apply. Never {@literal null}.
|
||||
* @since 3.2
|
||||
*/
|
||||
public DomainTypeMapping getDomainTypeMapping() {
|
||||
return domainTypeMapping;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration
|
||||
* applied.
|
||||
@@ -289,10 +248,6 @@ public class AggregationOptions {
|
||||
result.put(EXPLAIN, explain);
|
||||
}
|
||||
|
||||
if (result.containsKey(HINT)) {
|
||||
hint.ifPresent(val -> result.append(HINT, val));
|
||||
}
|
||||
|
||||
if (!result.containsKey(CURSOR)) {
|
||||
cursor.ifPresent(val -> result.put(CURSOR, val));
|
||||
}
|
||||
@@ -322,7 +277,6 @@ public class AggregationOptions {
|
||||
cursor.ifPresent(val -> document.put(CURSOR, val));
|
||||
collation.ifPresent(val -> document.append(COLLATION, val.toDocument()));
|
||||
comment.ifPresent(val -> document.append(COMMENT, val));
|
||||
hint.ifPresent(val -> document.append(HINT, val));
|
||||
|
||||
if (hasExecutionTimeLimit()) {
|
||||
document.append(MAX_TIME, maxTime.toMillis());
|
||||
@@ -364,10 +318,8 @@ public class AggregationOptions {
|
||||
private @Nullable Document cursor;
|
||||
private @Nullable Collation collation;
|
||||
private @Nullable String comment;
|
||||
private @Nullable Document hint;
|
||||
private @Nullable Duration maxTime;
|
||||
private @Nullable ResultOptions resultOptions;
|
||||
private @Nullable DomainTypeMapping domainTypeMapping;
|
||||
|
||||
/**
|
||||
* Defines whether to off-load intensive sort-operations to disk.
|
||||
@@ -444,24 +396,11 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a hint that is used by the query optimizer to fulfill the aggregation.
|
||||
*
|
||||
* @param hint can be {@literal null}.
|
||||
* @return this.
|
||||
* @since 3.1
|
||||
*/
|
||||
public Builder hint(@Nullable Document hint) {
|
||||
|
||||
this.hint = hint;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the time limit for processing.
|
||||
*
|
||||
* @param maxTime {@link Duration#ZERO} is used for the default unbounded behavior. {@link Duration#isNegative()
|
||||
* Negative} values will be ignored.
|
||||
* Negative} values will be ignored.
|
||||
* @return this.
|
||||
* @since 3.0
|
||||
*/
|
||||
@@ -485,44 +424,6 @@ public class AggregationOptions {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a strict domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field}
|
||||
* annotations throwing errors for non-existent, but referenced fields.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder strictMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.STRICT;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a relaxed domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field}
|
||||
* annotations using the user provided name if a referenced field does not exist.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder relaxedMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.RELAXED;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply no domain type mapping at all taking the pipeline as-is.
|
||||
*
|
||||
* @return this.
|
||||
* @since 3.2
|
||||
*/
|
||||
public Builder noMapping() {
|
||||
|
||||
this.domainTypeMapping = DomainTypeMapping.NONE;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link AggregationOptions} instance with the given configuration.
|
||||
*
|
||||
@@ -530,16 +431,13 @@ public class AggregationOptions {
|
||||
*/
|
||||
public AggregationOptions build() {
|
||||
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint);
|
||||
AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment);
|
||||
if (maxTime != null) {
|
||||
options.maxTime = maxTime;
|
||||
}
|
||||
if (resultOptions != null) {
|
||||
options.resultOptions = resultOptions;
|
||||
}
|
||||
if (domainTypeMapping != null) {
|
||||
options.domainTypeMapping = domainTypeMapping;
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
@@ -559,27 +457,4 @@ public class AggregationOptions {
|
||||
*/
|
||||
READ;
|
||||
}
|
||||
|
||||
/**
|
||||
* Aggregation pipeline Domain type mappings supported by the mapping layer.
|
||||
*
|
||||
* @since 3.2
|
||||
*/
|
||||
public enum DomainTypeMapping {
|
||||
|
||||
/**
|
||||
* Mapping throws errors for non-existent, but referenced fields.
|
||||
*/
|
||||
STRICT,
|
||||
|
||||
/**
|
||||
* Fields that do not exist in the model are treated as-is.
|
||||
*/
|
||||
RELAXED,
|
||||
|
||||
/**
|
||||
* Do not attempt to map fields against the model and treat the entire pipeline as-is.
|
||||
*/
|
||||
NONE
|
||||
}
|
||||
}
|
||||
|
||||
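A sketch of assembling AggregationOptions through the builder shown above, including the hint support present on the richer side of this diff; the index document and limits are illustrative:

```java
import java.time.Duration;

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.AggregationOptions;

// Force an index, cap execution time and allow spilling large sorts to disk.
AggregationOptions options = AggregationOptions.builder()
        .allowDiskUse(true)
        .hint(new Document("age", 1))
        .maxTime(Duration.ofSeconds(30))
        .build();
```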
@@ -18,7 +18,6 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import org.bson.Document;
|
||||
import org.springframework.util.Assert;
|
||||
@@ -27,7 +26,6 @@ import org.springframework.util.Assert;
|
||||
* The {@link AggregationPipeline} holds the collection of {@link AggregationOperation aggregation stages}.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @since 3.0.2
|
||||
*/
|
||||
public class AggregationPipeline {
|
||||
@@ -47,8 +45,6 @@ public class AggregationPipeline {
|
||||
* @param aggregationOperations must not be {@literal null}.
|
||||
*/
|
||||
public AggregationPipeline(List<AggregationOperation> aggregationOperations) {
|
||||
|
||||
Assert.notNull(aggregationOperations, "AggregationOperations must not be null!");
|
||||
pipeline = new ArrayList<>(aggregationOperations);
|
||||
}
|
||||
|
||||
@@ -86,77 +82,30 @@ public class AggregationPipeline {
|
||||
*/
|
||||
public boolean isOutOrMerge() {
|
||||
|
||||
if (isEmpty()) {
|
||||
if (pipeline.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AggregationOperation operation = pipeline.get(pipeline.size() - 1);
|
||||
return isOut(operation) || isMerge(operation);
|
||||
String operator = pipeline.get(pipeline.size() - 1).getOperator();
|
||||
return operator.equals("$out") || operator.equals("$merge");
|
||||
}
|
||||
|
||||
void verify() {
|
||||
|
||||
// check $out/$merge is the last operation if it exists
|
||||
for (AggregationOperation operation : pipeline) {
|
||||
for (AggregationOperation aggregationOperation : pipeline) {
|
||||
|
||||
if (isOut(operation) && !isLast(operation)) {
|
||||
if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation)) {
|
||||
throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline.");
|
||||
}
|
||||
|
||||
if (isMerge(operation) && !isLast(operation)) {
|
||||
if (aggregationOperation instanceof MergeOperation && !isLast(aggregationOperation)) {
|
||||
throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return whether this aggregation pipeline defines a {@code $unionWith} stage that may contribute documents from
|
||||
* other collections. Checking for presence of union stages is useful when attempting to determine the aggregation
|
||||
* element type for mapping metadata computation.
|
||||
*
|
||||
* @return {@literal true} the aggregation pipeline makes use of {@code $unionWith}.
|
||||
* @since 3.1
|
||||
*/
|
||||
public boolean containsUnionWith() {
|
||||
return containsOperation(AggregationPipeline::isUnionWith);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {@literal true} if the pipeline does not contain any stages.
|
||||
* @since 3.1
|
||||
*/
|
||||
public boolean isEmpty() {
|
||||
return pipeline.isEmpty();
|
||||
}
|
||||
|
||||
private boolean containsOperation(Predicate<AggregationOperation> predicate) {
|
||||
|
||||
if (isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (AggregationOperation element : pipeline) {
|
||||
if (predicate.test(element)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isLast(AggregationOperation aggregationOperation) {
|
||||
return pipeline.indexOf(aggregationOperation) == pipeline.size() - 1;
|
||||
}
|
||||
|
||||
private static boolean isUnionWith(AggregationOperation operator) {
|
||||
return operator instanceof UnionWithOperation || operator.getOperator().equals("$unionWith");
|
||||
}
|
||||
|
||||
private static boolean isMerge(AggregationOperation operator) {
|
||||
return operator instanceof MergeOperation || operator.getOperator().equals("$merge");
|
||||
}
|
||||
|
||||
private static boolean isOut(AggregationOperation operator) {
|
||||
return operator instanceof OutOperation || operator.getOperator().equals("$out");
|
||||
}
|
||||
}
|
||||
|
||||
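A short sketch of the pipeline rule enforced above: $out/$merge must be the last stage, and isOutOrMerge() exposes that check to callers. The stages are illustrative:

```java
import java.util.Arrays;

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
import org.springframework.data.mongodb.core.query.Criteria;

AggregationPipeline pipeline = new AggregationPipeline(Arrays.asList(
        Aggregation.match(Criteria.where("active").is(true)),
        Aggregation.out("archive")));

boolean writesToCollection = pipeline.isOutOrMerge(); // true, $out is the final stage
```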
@@ -24,15 +24,15 @@ import org.springframework.util.Assert;
|
||||
* expression</a>. <br />
|
||||
* <br />
|
||||
* <strong>Samples:</strong> <br />
|
||||
* <pre>
|
||||
* <code>
|
||||
* <pre>
|
||||
* // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
|
||||
* expressionOf("qty > 100 && qty < 250);
|
||||
*
|
||||
* // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
|
||||
* expressionOf("cond(a >= 42, 'answer', 'no-answer')");
|
||||
* </code>
|
||||
* </pre>
|
||||
* </code>
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
|
||||
@@ -71,7 +71,8 @@ import org.springframework.util.Assert;
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @author Mark Paluch
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline">MongoDB
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/method/db.collection.update/#update-with-aggregation-pipeline">MongoDB
|
||||
* Reference Documentation</a>
|
||||
* @since 3.0
|
||||
*/
|
||||
|
||||
File diff suppressed because it is too large.
@@ -28,7 +28,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating
|
||||
* instances of this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/">https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/">https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/</a>
|
||||
* @see BucketOperationSupport
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
@@ -247,7 +248,8 @@ public class BucketAutoOperation extends BucketOperationSupport<BucketAutoOperat
|
||||
/**
|
||||
* Supported MongoDB granularities.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity">https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity</a>
|
||||
* @see <a
|
||||
* href="https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity>https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity</a>
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public enum Granularities implements Granularity {
|
||||
|
||||
@@ -31,7 +31,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of
|
||||
* this class directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.org/manual/reference/aggregation/bucket/">https://docs.mongodb.org/manual/reference/aggregation/bucket/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.org/manual/reference/aggregation/bucket/">https://docs.mongodb.org/manual/reference/aggregation/bucket/</a>
|
||||
* @see BucketOperationSupport
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
@@ -103,7 +104,7 @@ public class BucketOperation extends BucketOperationSupport<BucketOperation, Buc
|
||||
return new Document(getOperator(), options);
|
||||
}
|
||||
|
||||
/*
|
||||
/*
|
||||
* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator()
|
||||
*/
|
||||
|
||||
@@ -17,7 +17,6 @@ package org.springframework.data.mongodb.core.aggregation;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@@ -236,7 +235,7 @@ public class ConditionalOperators {
|
||||
*
|
||||
* @author Mark Paluch
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/</a>
|
||||
*/
|
||||
public static class IfNull implements AggregationExpression {
|
||||
|
||||
@@ -252,8 +251,7 @@ public class ConditionalOperators {
|
||||
/**
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(String fieldReference) {
|
||||
@@ -266,7 +264,7 @@ public class ConditionalOperators {
|
||||
* Creates new {@link IfNull}.
|
||||
*
|
||||
* @param expression the expression to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return never {@literal null}.
|
||||
*/
|
||||
public static ThenBuilder ifNull(AggregationExpression expression) {
|
||||
@@ -284,29 +282,19 @@ public class ConditionalOperators {
|
||||
|
||||
List<Object> list = new ArrayList<Object>();
|
||||
|
||||
if (condition instanceof Collection) {
|
||||
for (Object val : ((Collection) this.condition)) {
|
||||
list.add(mapCondition(val, context));
|
||||
}
|
||||
if (condition instanceof Field) {
|
||||
list.add(context.getReference((Field) condition).toString());
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
list.add(((AggregationExpression) condition).toDocument(context));
|
||||
} else {
|
||||
list.add(mapCondition(condition, context));
|
||||
list.add(condition);
|
||||
}
|
||||
|
||||
list.add(resolve(value, context));
|
||||
|
||||
return new Document("$ifNull", list);
|
||||
}
|
||||
|
||||
private Object mapCondition(Object condition, AggregationOperationContext context) {
|
||||
|
||||
if (condition instanceof Field) {
|
||||
return context.getReference((Field) condition).toString();
|
||||
} else if (condition instanceof AggregationExpression) {
|
||||
return ((AggregationExpression) condition).toDocument(context);
|
||||
} else {
|
||||
return condition;
|
||||
}
|
||||
}
|
||||
|
||||
private Object resolve(Object value, AggregationOperationContext context) {
|
||||
|
||||
if (value instanceof Field) {
|
||||
@@ -327,48 +315,28 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder ifNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value, field name must not be {@literal null}
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}.
|
||||
* or empty.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder ifNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public interface OrBuilder {
|
||||
|
||||
/**
|
||||
* @param fieldReference the field to check for a {@literal null} value, field reference must not be
|
||||
* {@literal null}.
|
||||
* @return the {@link ThenBuilder}
|
||||
*/
|
||||
ThenBuilder orIfNull(String fieldReference);
|
||||
|
||||
/**
|
||||
* @param expression the expression to check for a {@literal null} value,
|
||||
* @return the {@link ThenBuilder}.
|
||||
*/
|
||||
ThenBuilder orIfNull(AggregationExpression expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* @author Mark Paluch
|
||||
*/
|
||||
public interface ThenBuilder extends OrBuilder {
|
||||
public interface ThenBuilder {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB
|
||||
* representation but must not be {@literal null}.
|
||||
* @return new instance of {@link IfNull}.
|
||||
*/
|
||||
IfNull then(Object value);
|
||||
@@ -393,10 +361,9 @@ public class ConditionalOperators {
|
||||
*/
|
||||
static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder {
|
||||
|
||||
private @Nullable List<Object> conditions;
|
||||
private @Nullable Object condition;
|
||||
|
||||
private IfNullOperatorBuilder() {
|
||||
conditions = new ArrayList<>();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -414,7 +381,7 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(String fieldReference) {
|
||||
|
||||
Assert.hasText(fieldReference, "FieldReference name must not be null or empty!");
|
||||
this.conditions.add(Fields.field(fieldReference));
|
||||
this.condition = Fields.field(fieldReference);
|
||||
return this;
|
||||
}
|
||||
|
||||
@@ -425,25 +392,15 @@ public class ConditionalOperators {
|
||||
public ThenBuilder ifNull(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "AggregationExpression name must not be null or empty!");
|
||||
this.conditions.add(expression);
|
||||
this.condition = expression;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(String fieldReference) {
|
||||
return ifNull(fieldReference);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ThenBuilder orIfNull(AggregationExpression expression) {
|
||||
return ifNull(expression);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
* @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object)
|
||||
*/
|
||||
public IfNull then(Object value) {
|
||||
return new IfNull(conditions, value);
|
||||
return new IfNull(condition, value);
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -452,7 +409,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(String fieldReference) {
|
||||
|
||||
Assert.notNull(fieldReference, "FieldReference must not be null!");
|
||||
return new IfNull(conditions, Fields.field(fieldReference));
|
||||
return new IfNull(condition, Fields.field(fieldReference));
|
||||
}
|
||||
|
||||
/* (non-Javadoc)
|
||||
@@ -461,7 +418,7 @@ public class ConditionalOperators {
|
||||
public IfNull thenValueOf(AggregationExpression expression) {
|
||||
|
||||
Assert.notNull(expression, "Expression must not be null!");
|
||||
return new IfNull(conditions, expression);
|
||||
return new IfNull(condition, expression);
|
||||
}
|
||||
}
|
||||
}
|
||||
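A usage sketch for the extended $ifNull builder above, chaining an additional condition via orIfNull; field names and the fallback value are illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.ConditionalOperators;

// { $ifNull: [ "$description", "$fallbackDescription", "n/a" ] }
AggregationExpression description = ConditionalOperators.ifNull("description")
        .orIfNull("fallbackDescription")
        .then("n/a");
```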
@@ -501,7 +458,7 @@ public class ConditionalOperators {
|
||||
public static Switch switchCases(List<CaseOperator> conditions) {
|
||||
|
||||
Assert.notNull(conditions, "Conditions must not be null!");
|
||||
return new Switch(Collections.<String, Object> singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
return new Switch(Collections.<String, Object>singletonMap("branches", new ArrayList<CaseOperator>(conditions)));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -588,7 +545,7 @@ public class ConditionalOperators {
|
||||
* @author Mark Paluch
|
||||
* @author Christoph Strobl
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/</a>
|
||||
*/
|
||||
public static class Cond implements AggregationExpression {
|
||||
|
||||
@@ -849,8 +806,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link OtherwiseBuilder}
|
||||
*/
|
||||
OtherwiseBuilder then(Object value);
|
||||
@@ -875,8 +832,8 @@ public class ConditionalOperators {
|
||||
|
||||
/**
|
||||
* @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* value that is supported by MongoDB or a value that can be converted to a MongoDB representation but
|
||||
* must not be {@literal null}.
|
||||
* @return the {@link Cond}
|
||||
*/
|
||||
Cond otherwise(Object value);
|
||||
@@ -904,7 +861,8 @@ public class ConditionalOperators {
|
||||
private @Nullable Object condition;
|
||||
private @Nullable Object thenValue;
|
||||
|
||||
private ConditionalExpressionBuilder() {}
|
||||
private ConditionalExpressionBuilder() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new builder for {@link Cond}.
|
||||
|
||||
@@ -231,17 +231,6 @@ public class ConvertOperators {
|
||||
return ToString.toString(valueObject());
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to
|
||||
* radians.
|
||||
*
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
* @since 3.3
|
||||
*/
|
||||
public DegreesToRadians convertDegreesToRadians() {
|
||||
return DegreesToRadians.degreesToRadians(valueObject());
|
||||
}
|
||||
|
||||
private Convert createConvert() {
|
||||
return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression);
|
||||
}
|
||||
@@ -328,9 +317,9 @@ public class ConvertOperators {
|
||||
* <dt>1</dt>
|
||||
* <dd>double</dd>
|
||||
* <dt>2</dt>
|
||||
* <dd>string</dd>
|
||||
* <dd>string</li>
|
||||
* <dt>7</dt>
|
||||
* <dd>objectId</dd>
|
||||
* <dd>objectId</li>
|
||||
* <dt>8</dt>
|
||||
* <dd>bool</dd>
|
||||
* <dt>9</dt>
|
||||
@@ -703,52 +692,4 @@ public class ConvertOperators {
|
||||
return "$toString";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.
|
||||
*
|
||||
* @author Christoph Strobl
|
||||
* @since 3.3
|
||||
*/
|
||||
public static class DegreesToRadians extends AbstractAggregationExpression {
|
||||
|
||||
private DegreesToRadians(Object value) {
|
||||
super(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians.
|
||||
*
|
||||
* @param fieldName must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(String fieldName) {
|
||||
return degreesToRadians(Fields.field(fieldName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians.
|
||||
*
|
||||
* @param expression must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) {
|
||||
return degreesToRadians(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians.
|
||||
*
|
||||
* @param value must not be {@literal null}.
|
||||
* @return new instance of {@link DegreesToRadians}.
|
||||
*/
|
||||
public static DegreesToRadians degreesToRadians(Object value) {
|
||||
return new DegreesToRadians(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getMongoMethod() {
|
||||
return "$degreesToRadians";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
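A minimal sketch for the $degreesToRadians conversion added above; the field name is illustrative:

```java
import org.springframework.data.mongodb.core.aggregation.AggregationExpression;
import org.springframework.data.mongodb.core.aggregation.ConvertOperators;

// { $degreesToRadians: "$angle" }
AggregationExpression radians = ConvertOperators.valueOf("angle").convertDegreesToRadians();
```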
@@ -24,7 +24,8 @@ import org.springframework.util.Assert;
|
||||
* We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class
|
||||
* directly.
|
||||
*
|
||||
* @see <a href="https://docs.mongodb.com/manual/reference/operator/aggregation/count/#pipe._S_count">https://docs.mongodb.com/manual/reference/operator/aggregation/count/</a>
|
||||
* @see <a href=
|
||||
* "https://docs.mongodb.com/manual/reference/operator/aggregation/count/#pipe._S_count">https://docs.mongodb.com/manual/reference/operator/aggregation/count/</a>
|
||||
* @author Mark Paluch
|
||||
* @since 1.10
|
||||
*/
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.